
Commit

Merge pull request #48 from RWTH-EBC/43-Improvement-of-sensitivity-analysis

43 improvement of sensitivity analysis
HvanderStok authored Jan 27, 2024
2 parents 6bfe1d6 + 2cc394d commit f937d3d
Showing 62 changed files with 12,441 additions and 232 deletions.
3 changes: 2 additions & 1 deletion .coveragerc
@@ -15,4 +15,5 @@ exclude_lines =
if __name__ == '__main__':
raise ImportError
except Exception as error:
except ImportError
except ImportError
plt.show()
3 changes: 2 additions & 1 deletion .gitignore
@@ -5,7 +5,8 @@
*.bak
.project
.pydevproject
aixcalibuha/examples/testzone
examples/testzone
examples/results

# pip install #
#####################
3 changes: 2 additions & 1 deletion .gitlab-ci.yml
@@ -9,9 +9,10 @@ stages:
variables:
COVERAGE_TYPE: "FMU"
PAGES_BRANCH: master
EXLUDE_PYTHON: 37
EXCLUDE_PYTHON: 37
GIT_REPO: RWTH-EBC/AixCaliBuHA
PYTHON_PACKAGE_NAME: "aixcalibuha"
PYLINT_INPUT: "aixcalibuha"
PYTHON_VERSION: "registry.git.rwth-aachen.de/ebc/ebc_all/gitlab_ci/templates:python_3.9"
TEST_ENGINE: "unittest"

9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -51,3 +51,12 @@

- **v0.3.1**
- Issue 41: Fix logging and add kwarg

- **v1.0.0**
- Issue 43: Improvement of sensitivity analysis
- Enables verbose sensitivity analysis and the reuse of simulations
- It is now possible to use verbose sensitivity analysis for an automatic selection of tuner parameters
- Enables multiprocessing for the entire sensitivity process
- Sensitivity analysis is now usable for large models and data
- Add time dependent sensitivity analysis
- Ends support for python 3.7
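The bullet points above are the core of this release. As a rough orientation for readers of this commit, a minimal sketch of driving the reworked analyzers follows; only the class name `SobolAnalyzer` is taken from this commit (see the `__init__.py` diffs further down), while the constructor keywords, the `run()` signature, and the placeholder `sim_api`/`calibration_class` objects are assumptions for illustration, not the documented API.

```python
# Sketch under stated assumptions: SobolAnalyzer is exported by aixcalibuha in
# this commit; every keyword below and the run() signature are assumed and may
# differ from the released API. sim_api and calibration_class are placeholders
# for an ebcpy SimulationAPI and an aixcalibuha CalibrationClass set up earlier.
from aixcalibuha import SobolAnalyzer

sen_analyzer = SobolAnalyzer(
    sim_api=sim_api,      # placeholder: configured simulation API
    num_samples=8,        # assumed keyword: size of the Sobol sample design
    cd=sim_api.cd,        # assumed keyword: directory where results are stored
)
# A verbose run keeps per-simulation results so later analyses (or a re-run
# with another analyzer) can reuse them instead of simulating again
# (assumed keyword).
result = sen_analyzer.run(calibration_class, verbose=True)
```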
12 changes: 6 additions & 6 deletions README.md
@@ -1,11 +1,11 @@
![E.ON EBC RWTH Aachen University](./docs/EBC_Logo.png)

[![DOI](https://joss.theoj.org/papers/10.21105/joss.03861/status.svg)](https://doi.org/10.21105/joss.03861)
[![pylint](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/pylint/pylint.svg)](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/pylint/pylint.html)
[![documentation](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/docs/doc.svg)](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/docs/index.html)
[![coverage](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/coverage/badge.svg)](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/coverage)
[![pylint](https://rwth-ebc.github.io/AixCaliBuHA/master/pylint/pylint.svg)](https://rwth-ebc.github.io/AixCaliBuHA/master/pylint/pylint.html)
[![documentation](https://rwth-ebc.github.io/AixCaliBuHA/master/docs/doc.svg)](https://rwth-ebc.github.io/AixCaliBuHA/master/docs/index.html)
[![coverage](https://rwth-ebc.github.io/AixCaliBuHA/master/coverage/badge.svg)](https://rwth-ebc.github.io/AixCaliBuHA/master/coverage)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![build](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/build/build.svg)](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/build/build.svg)
[![build](https://rwth-ebc.github.io/AixCaliBuHA/master/build/build.svg)](https://rwth-ebc.github.io/AixCaliBuHA/master/build/build.svg)

# AixCaliBuHA

@@ -16,7 +16,7 @@ This framework automates the process of calibrating models used in Building
and HVAC Simulations.

# Key features
- Performing a **Sensitivity Analysis** to discover tuner parameters for the calibration
- Performing a **Sensitivity Analysis** to analyze your model and discover tuner parameters for the calibration
- **Calibration** of a given model based on the tuner parameters, the calibration classes and specified goals to evaluate the objective function of the underlying optimization

# Installation
@@ -143,7 +143,7 @@ Please use the following metadata to cite `AixCaliBuHA` in your research:
```

# Documentation
Visit our official [Documentation](https://ebc.pages.rwth-aachen.de/EBC_all/github_ci/AixCaliBuHA/master/docs).
Visit our official [Documentation](https://rwth-ebc.github.io/AixCaliBuHA/master/docs).

# Problems?
Please [raise an issue here](https://github.com/RWTH-EBC/AixCaliBuHA/issues/new).
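To complement the key-features list in the README diff above, here is a minimal calibration sketch. The `Calibrator` arguments `cd`, `sim_api`, and `calibration_class`, and the `calibrate(framework, method=...)` signature, are visible in the `calibrator.py` diff below; the framework and method strings and the placeholder objects are assumptions for illustration.

```python
# Sketch under stated assumptions: the constructor arguments and the
# calibrate(framework, method) signature appear in this commit's calibrator.py;
# the chosen framework/method strings are assumed, and sim_api and
# calibration_class are placeholders for objects set up beforehand.
from aixcalibuha import Calibrator

calibrator = Calibrator(
    cd="results",                         # working directory for logs and results
    sim_api=sim_api,                      # placeholder: configured SimulationAPI
    calibration_class=calibration_class,  # placeholder: tuner parameters + goals
)
best_parameters = calibrator.calibrate(
    framework="scipy_differential_evolution",  # assumed framework name
    method="best1bin",                          # assumed method name
)
```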
6 changes: 4 additions & 2 deletions aixcalibuha/__init__.py
@@ -4,5 +4,7 @@
"""
from .data_types import CalibrationClass, TunerParas, Goals
from .calibration import Calibrator, MultipleClassCalibrator
from .sensitivity_analysis import SobolAnalyzer, MorrisAnalyzer
__version__ = "0.3.1"
from .sensitivity_analysis import SobolAnalyzer, MorrisAnalyzer, FASTAnalyzer, PAWNAnalyzer, \
plotting

__version__ = "1.0.0"
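The widened top-level exports and the version bump above can be checked with a short, self-contained import test (assuming `aixcalibuha` 1.0.0 is installed):

```python
# Minimal check of the new top-level API surface introduced by this commit.
import aixcalibuha
from aixcalibuha import (
    SobolAnalyzer, MorrisAnalyzer, FASTAnalyzer, PAWNAnalyzer, plotting,
)

print(aixcalibuha.__version__)  # expected: "1.0.0"
```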
48 changes: 34 additions & 14 deletions aixcalibuha/calibration/calibrator.py
@@ -10,6 +10,7 @@
from typing import Dict
from copy import copy
import numpy as np
import pandas as pd
from ebcpy import data_types, Optimizer
from ebcpy.simulationapi import SimulationAPI
from aixcalibuha.utils import visualizer, MaxIterationsReached
@@ -95,7 +96,7 @@ def __init__(self,
calibration_class: CalibrationClass,
**kwargs):
"""Instantiate instance attributes"""
#%% Kwargs
# %% Kwargs
# Initialize supported keywords with default value
# Pop the items so they wont be added when calling the
# __init__ of the parent class. Always pop with a default value in case
@@ -131,7 +132,7 @@ def __init__(self,
raise TypeError(f"Given {bool_keyword} is of type "
f"{type(keyword_value).__name__} but should be type bool")

#%% Initialize all public parameters
# %% Initialize all public parameters
super().__init__(cd, **kwargs)
# Set sim_api
self.sim_api = sim_api
@@ -153,7 +154,7 @@ def __init__(self,
"stop_time": self.calibration_class.stop_time}
)

#%% Setup the logger
# %% Setup the logger
# De-register the logger setup in the optimization class:
if self.verbose_logging:
self.logger = visualizer.CalibrationVisualizer(
@@ -243,13 +244,13 @@ def obj(self, xk, *args):
)
return self.ret_val_on_error

total_res = self._kpi_and_logging_calculation(
total_res, unweighted_objective = self._kpi_and_logging_calculation(
xk_descaled=xk_descaled,
counter=self._counter,
results=sim_target_data
)

return total_res
return total_res, unweighted_objective

def mp_obj(self, x, *args):
# Initialize list for results
@@ -268,12 +269,14 @@ def mp_obj(self, x, *args):
xk_descaled_list.append(xk_descaled)
# Update Parameters
parameter_copy = parameters.copy()
parameter_copy.update({name: value for name, value in zip(initial_names, xk_descaled.values)})
parameter_copy.update(
{name: value for name, value in zip(initial_names, xk_descaled.values)})
parameter_list.append(parameter_copy)

# Simulate
if self.save_files:
result_file_names = [f"simulation_{self._counter + idx}" for idx in range(len(parameter_list))]
result_file_names = [f"simulation_{self._counter + idx}" for idx in
range(len(parameter_list))]
_filepaths = self.sim_api.simulate(
parameters=parameter_list,
return_option="savepath",
@@ -304,7 +307,7 @@ def mp_obj(self, x, *args):
if result is None:
total_res_list[idx] = self.ret_val_on_error
continue
total_res = self._kpi_and_logging_calculation(
total_res, unweighted_objective = self._kpi_and_logging_calculation(
xk_descaled=xk_descaled_list[idx],
counter=self._counter,
results=result
@@ -370,7 +373,7 @@ def _kpi_and_logging_calculation(self, *, xk_descaled, counter, results):
f"of iterations {self.max_itercount} has been reached."
)

return total_res
return total_res, unweighted_objective

def calibrate(self, framework, method=None, **kwargs) -> dict:
"""
@@ -380,7 +383,7 @@ def calibrate(self, framework, method=None, **kwargs) -> dict:
arguments in Optimizer.optimize(). Look at the docstring
in ebcpy to know which options are available.
"""
#%% Start Calibration:
# %% Start Calibration:
self.at_calibration = True
self.logger.log(f"Start calibration of model: {self.sim_api.model_name}"
f" with framework-class {self.__class__.__name__}")
@@ -417,7 +420,7 @@ def calibrate(self, framework, method=None, **kwargs) -> dict:
"Can't save or return any results."
)

#%% Save the relevant results.
# %% Save the relevant results.
self.logger.save_calibration_result(self._current_best_iterate,
self.sim_api.model_name,
duration=t_cal,
@@ -486,7 +489,7 @@ def save_results(self, parameter_values: dict, filename: str):
with open(s_path, 'w') as json_file:
json.dump(parameter_values, json_file, indent=4)

def validate(self, validation_class: CalibrationClass, calibration_result: Dict):
def validate(self, validation_class: CalibrationClass, calibration_result: Dict, verbose=False):
"""
Validate the given calibration class based on the given
values for tuner_parameters.
@@ -501,10 +504,10 @@ def validate(self, validation_class: CalibrationClass, calibration_result: Dict)
self.logger.log(f"Start validation of model: {self.sim_api.model_name} with "
f"framework-class {self.__class__.__name__}")
# Use start-time of calibration class
self.calibration_class = validation_class
start_time = self._apply_start_time_method(
start_time=self.calibration_class.start_time
)
self.calibration_class = validation_class
old_tuner_paras = copy(self.calibration_class.tuner_paras)
tuner_values = list(calibration_result.values())
self.calibration_class.tuner_paras = TunerParas(
@@ -526,12 +529,29 @@ def validate(self, validation_class: CalibrationClass, calibration_result: Dict)
# Scale the tuner parameters
xk = self.tuner_paras.scale(tuner_values)
# Evaluate objective
obj = self.obj(xk=xk)
obj, unweighted_objective = self.obj(xk=xk)
self.logger.validation_callback_func(
obj=obj
)
# Reset tuner_parameters to avoid unwanted behaviour
self.calibration_class.tuner_paras = old_tuner_paras
if verbose:
weights = [1]
objectives = [obj]
goals = ['all']
for goal, val in unweighted_objective.items():
weights.append(val[0])
objectives.append(val[1])
goals.append(goal)
index = pd.MultiIndex.from_product(
[[validation_class.name], goals],
names=['Class', 'Goal']
)
obj_verbos = pd.DataFrame(
{'weight': weights, validation_class.goals.statistical_measure: objectives},
index=index
)
return obj_verbos
return obj

def _handle_error(self, error):
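With `verbose=True`, `validate` now returns a per-goal breakdown instead of a single value. The snippet below is not library code; it only mirrors, with invented class and goal names and numbers, how the returned `DataFrame` is assembled in the diff above: a MultiIndex of class and goal, an `'all'` row holding the weighted total, a `weight` column, and one column named after the statistical measure.

```python
# Stand-alone mirror of the verbose validation result built in the diff above;
# all names and numbers are invented, only the structure follows the new code.
import pandas as pd

goals = ["all", "TRoom", "TReturn"]   # hypothetical goal names, 'all' first
weights = [1, 0.6, 0.4]               # total row weight, then per-goal weights
objectives = [0.123, 0.150, 0.082]    # weighted total, then per-goal objectives

index = pd.MultiIndex.from_product(
    [["validation_period"], goals], names=["Class", "Goal"]
)
obj_verbose = pd.DataFrame({"weight": weights, "RMSE": objectives}, index=index)
print(obj_verbose)
```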
2 changes: 2 additions & 0 deletions aixcalibuha/calibration/multi_class_calibrator.py
@@ -176,6 +176,8 @@ def calibrate(self, framework, method=None, **kwargs) -> dict:
for cal_run in self._cal_history:
for p_name in cal_run['res']['Parameters'].index:
parameter_values[p_name] = cal_run['res']['Parameters'][p_name]
for p_name, res_intersection in res_tuner.items():
parameter_values[p_name] = res_intersection
self.save_results(parameter_values=parameter_values,
filename='MultiClassCalibrationResult')

22 changes: 11 additions & 11 deletions aixcalibuha/data_types.py
@@ -11,6 +11,7 @@
from ebcpy import TimeSeriesData
from ebcpy.utils.statistics_analyzer import StatisticsAnalyzer
from ebcpy.preprocessing import convert_datetime_index_to_float_index

# pylint: disable=I1101

logger = logging.getLogger(__name__)
@@ -146,8 +147,8 @@ def __init__(self,
# Set the weightings, if not specified.
self._num_goals = len(_columns)
if weightings is None:
self._weightings = np.array([1/self._num_goals
for i in range(self._num_goals)])
self.weightings = np.array([1 / self._num_goals
for i in range(self._num_goals)])
else:
if not isinstance(weightings, (list, np.ndarray)):
raise TypeError(f"weightings is of type {type(weightings).__name__} "
@@ -156,7 +157,7 @@
raise IndexError(f"The given number of weightings ({len(weightings)}) "
f"does not match the number of "
f"goals ({self._num_goals})")
self._weightings = np.array(weightings) / sum(weightings)
self.weightings = np.array(weightings) / sum(weightings)

def __str__(self):
"""Overwrite string method to present the Goals-Object more
@@ -213,10 +214,8 @@ def eval_difference(self, verbose=False, penaltyfactor=1):
)
# Apply penalty function
_diff = _diff * penaltyfactor

_verbose_calculation[self._weightings[i]] = _diff
total_difference += self._weightings[i] * _diff

_verbose_calculation[goal_name] = (self.weightings[i], _diff)
total_difference += self.weightings[i] * _diff
if verbose:
return total_difference, _verbose_calculation
return total_difference
@@ -368,6 +367,7 @@ class TunerParas:
m_flow_2 0.02 0.01 0.1 0.09
heatConv_a 200.00 10.00 300.0 290.00
"""

def __init__(self, names, initial_values, bounds=None):
"""Initialize class-objects and check correct input."""
# Check if the given input-parameters are of correct format. If not, raise an error.
@@ -426,7 +426,7 @@ def scale(self, descaled):
# If no bounds are given, scaling is not possible--> descaled = scaled
if self._bounds is None:
return descaled
_scaled = (descaled - self._df["min"])/self._df["scale"]
_scaled = (descaled - self._df["min"]) / self._df["scale"]
if not all((_scaled >= 0) & (_scaled <= 1)):
warnings.warn("Given descaled values are outside "
"of bounds. Automatically limiting "
@@ -446,12 +446,12 @@ def descale(self, scaled):
if not self._bounds:
return scaled
_scaled = np.array(scaled)
if not all((_scaled >= 0-1e4) & (_scaled <= 1+1e4)):
if not all((_scaled >= 0 - 1e4) & (_scaled <= 1 + 1e4)):
warnings.warn("Given scaled values are outside of bounds. "
"Automatically limiting the values with "
"respect to the bounds.")
_scaled = np.clip(_scaled, a_min=0, a_max=1)
return _scaled*self._df["scale"] + self._df["min"]
return _scaled * self._df["scale"] + self._df["min"]

@property
def bounds(self):
@@ -706,7 +706,7 @@ def merge_calibration_classes(calibration_classes):
"inputs": deepcopy(cal_class.inputs),
"input_kwargs": deepcopy(cal_class.input_kwargs)
}

# Convert dict to actual calibration-classes
cal_classes_merged = []
for _name, values in temp_merged.items():
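Two numeric details in the `data_types.py` diff above deserve spelling out: user-supplied goal weightings are normalized to sum to one, and tuner values are mapped into [0, 1] by `(value - min) / scale` and back by `scaled * scale + min`. A small self-contained sketch of that arithmetic in plain NumPy, assuming `scale = max - min` (the definition of `scale` itself is not part of this diff):

```python
# Plain-NumPy mirror of the arithmetic in the Goals/TunerParas diff above;
# the example values are invented and scale = max - min is an assumption.
import numpy as np

# Goals: weightings are normalized so they sum to 1.
weightings = np.array([2.0, 1.0, 1.0])
weightings = weightings / sum(weightings)   # -> [0.5, 0.25, 0.25]

# TunerParas: scale a value into [0, 1] and descale it back again.
value, v_min, v_max = 0.03, 0.01, 0.10
span = v_max - v_min                        # assumed meaning of the "scale" column
scaled = (value - v_min) / span             # -> ~0.222
descaled = scaled * span + v_min            # -> 0.03 again
print(weightings, scaled, descaled)
```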
4 changes: 4 additions & 0 deletions aixcalibuha/sensitivity_analysis/__init__.py
@@ -2,6 +2,10 @@
This package contains classes to perform
sensitivity analysis with.
"""

from aixcalibuha.sensitivity_analysis import plotting
from .sensitivity_analyzer import SenAnalyzer
from .sobol import SobolAnalyzer
from .morris import MorrisAnalyzer
from .fast import FASTAnalyzer
from .pawn import PAWNAnalyzer
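The subpackage now also exposes the FAST and PAWN analyzers and a dedicated `plotting` module. A minimal import sketch follows; the diff only shows that `plotting` exists, so any concrete helper inside it would have to be looked up in the released documentation rather than taken from here.

```python
# Imports follow the new sensitivity_analysis/__init__.py shown above.
from aixcalibuha.sensitivity_analysis import (
    SenAnalyzer, SobolAnalyzer, MorrisAnalyzer, FASTAnalyzer, PAWNAnalyzer,
    plotting,
)

# List the public names of the new plotting module; its concrete helpers are
# not part of this diff.
print([name for name in dir(plotting) if not name.startswith("_")])
```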