diff --git a/model_tools/__init__.py b/model_tools/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/model_tools/parameters_storage.py b/model_tools/parameters_storage.py
deleted file mode 100644
index 1c793c411a383bf72a1b3a3bd4ce1b9f6f689974..0000000000000000000000000000000000000000
--- a/model_tools/parameters_storage.py
+++ /dev/null
@@ -1,180 +0,0 @@
-from multikeydict.nestedmkdict import NestedMKDict
-from multikeydict.visitor import NestedMKDictVisitor
-from dagflow.output import Output
-
-from typing import Union, Tuple, List, Optional
-
-from tabulate import tabulate
-from pandas import DataFrame
-from LaTeXDatax import datax
-
-from numpy import nan
-import pandas as pd
-pd.set_option('display.max_rows', None)
-pd.set_option('display.max_colwidth', 100)
-
-from shutil import get_terminal_size
-
-def trunc(text: str, width: int) -> str:
-    return '\n'.join(line[:width] for line in text.split('\n'))
-
-class ParametersStorage(NestedMKDict):
-    def read_paths(self) -> None:
-        for key, value in self.walkitems():
-            labels = getattr(value, 'labels', None)
-            if labels is None:
-                continue
-
-            key = '.'.join(key)
-            labels.paths.append(key)
-
-    def plot(
-        self,
-        *args,
-        **kwargs
-    ) -> None:
-        from dagflow.plot import plot_auto
-        for _, value in self.walkitems():
-            if not isinstance(value, Output):
-                continue
-
-            plot_auto(value, *args, **kwargs)
-
-    def to_list(self, **kwargs) -> list:
-        return self.visit(ParametersVisitor(kwargs)).data_list
-
-    def to_dict(self, **kwargs) -> NestedMKDict:
-        return self.visit(ParametersVisitor(kwargs)).data_dict
-
-    def to_df(self, *, columns: Optional[List[str]]=None, **kwargs) -> DataFrame:
-        dct = self.to_list(**kwargs)
-        if columns is None:
-            columns = ['path', 'value', 'central', 'sigma', 'flags', 'label']
-        df = DataFrame(dct, columns=columns)
-
-        df.insert(4, 'sigma_rel_perc', df['sigma'])
-        df['sigma_rel_perc'] = df['sigma']/df['central']*100.
-        df['sigma_rel_perc'].mask(df['central']==0, nan, inplace=True)
-
-        for key in ('central', 'sigma', 'sigma_rel_perc'):
-            if df[key].isna().all():
-                del df[key]
-            else:
-                df[key].fillna('-', inplace=True)
-
-        df['value'].fillna('-', inplace=True)
-        df['flags'].fillna('', inplace=True)
-        df['label'].fillna('', inplace=True)
-
-        if (df['flags']=='').all():
-            del df['flags']
-
-        return df
-
-    def to_string(self, **kwargs) -> str:
-        df = self.to_df()
-        return df.to_string(**kwargs)
-
-    def to_table(
-        self,
-        *,
-        df_kwargs: dict={},
-        truncate: Union[int, bool] = False,
-        **kwargs
-    ) -> str:
-        df = self.to_df(**df_kwargs)
-        kwargs.setdefault('headers', df.columns)
-        ret = tabulate(df, **kwargs)
-
-        if truncate:
-            if isinstance(truncate, bool):
-                truncate = get_terminal_size().columns
-
-            return trunc(ret, width=truncate)
-
-
-        return ret
-
-    def to_latex(self, *, return_df: bool=False, **kwargs) -> Union[str, Tuple[str, DataFrame]]:
-        df = self.to_df(label_from='latex', **kwargs)
-        tex = df.to_latex(escape=False)
-
-        if return_df:
-            return tex, df
-
-        return tex
-
-    def to_datax(self, filename: str, **kwargs) -> None:
-        data = self.to_dict(**kwargs)
-        skip = {'path', 'label', 'flags'} # TODO, add LaTeX label
-        odict = {'.'.join(k): v for k, v in data.walkitems() if not (k and k[-1] in skip)}
-        datax(filename, **odict)
-
-class ParametersVisitor(NestedMKDictVisitor):
-    __slots__ = ('_kwargs', '_data_list', '_localdata', '_path')
-    _kwargs: dict
-    _data_list: List[dict]
-    _data_dict: NestedMKDict
-    _localdata: List[dict]
-    _paths: List[Tuple[str, ...]]
-    _path: Tuple[str, ...]
-    # _npars: List[int]
-
-    def __init__(self, kwargs: dict):
-        self._kwargs = kwargs
-        # self._npars = []
-
-    @property
-    def data_list(self) -> List[dict]:
-        return self._data_list
-
-    @property
-    def data_dict(self) -> NestedMKDict:
-        return self._data_dict
-
-    def start(self, dct):
-        self._data_list = []
-        self._data_dict = NestedMKDict({}, sep='.')
-        self._path = ()
-        self._paths = []
-        self._localdata = []
-
-    def enterdict(self, k, v):
-        if self._localdata:
-            self.exitdict(self._path, None)
-        self._path = k
-        self._paths.append(self._path)
-        self._localdata = []
-
-    def visit(self, key, value):
-        try:
-            dct = value.to_dict(**self._kwargs)
-        except AttributeError:
-            return
-
-        subkey = key[len(self._path):]
-        subkeystr = '.'.join(subkey)
-
-        if self._path:
-            dct['path'] = f'.. {subkeystr}'
-        else:
-            dct['path'] = subkeystr
-
-        self._localdata.append(dct)
-
-        self._data_dict[key]=dct
-
-    def exitdict(self, k, v):
-        if self._localdata:
-            self._data_list.append({
-                'path': f"group: {'.'.join(self._path)} [{len(self._localdata)}]"
-            })
-            self._data_list.extend(self._localdata)
-            self._localdata = []
-        if self._paths:
-            del self._paths[-1]
-
-        self._path = self._paths[-1] if self._paths else ()
-
-    def stop(self, dct):
-        pass
diff --git a/models/dayabay_v0.py b/models/dayabay_v0.py
index e65912baba3450cfed8cba9ba7c0b4e52333a8be..b2e06e54d7f3b5c9d6d2a04c05f97d01698bbb50 100644
--- a/models/dayabay_v0.py
+++ b/models/dayabay_v0.py
@@ -6,10 +6,10 @@ from dagflow.graphviz import savegraph
 from dagflow.lib.arithmetic import Sum
 from dagflow.tools.schema import LoadYaml
 from gindex import GNIndex
-from model_tools.parameters_storage import ParametersStorage
+from dagflow.storage import NodeStorage
 
 def model_dayabay_v0():
-    storage = ParametersStorage({}, sep='.')
+    storage = NodeStorage({}, sep='.')
     datasource = Path('data/dayabay-v0')
 
     index = GNIndex.from_dict({
@@ -27,25 +27,25 @@ def model_dayabay_v0():
     list_dr = idx_rd.values
     list_reactors_isotopes = idx_ri.values
 
-    with Graph(close=True) as g:
+    with Graph(close=True) as graph, storage:
         #
         # Load parameters
         #
-        storage ^= load_parameters({'path': 'ibd' , 'load': datasource/'parameters/pdg2012.yaml'})
-        storage ^= load_parameters({'path': 'ibd.csc' , 'load': datasource/'parameters/ibd_constants.yaml'})
-        storage ^= load_parameters({'path': 'conversion' , 'load': datasource/'parameters/conversion_thermal_power.yaml'})
-        storage ^= load_parameters({'path': 'conversion' , 'load': datasource/'parameters/conversion_oscprob_argument.yaml'})
+        load_parameters({'path': 'ibd' , 'load': datasource/'parameters/pdg2012.yaml'})
+        load_parameters({'path': 'ibd.csc' , 'load': datasource/'parameters/ibd_constants.yaml'})
+        load_parameters({'path': 'conversion' , 'load': datasource/'parameters/conversion_thermal_power.yaml'})
+        load_parameters({'path': 'conversion' , 'load': datasource/'parameters/conversion_oscprob_argument.yaml'})
 
-        storage ^= load_parameters({ 'load': datasource/'parameters/baselines.yaml'})
+        load_parameters({ 'load': datasource/'parameters/baselines.yaml'})
 
-        storage ^= load_parameters({'path': 'detector' , 'load': datasource/'parameters/detector_nprotons_correction.yaml'})
-        storage ^= load_parameters({'path': 'detector' , 'load': datasource/'parameters/detector_eres.yaml'})
+        load_parameters({'path': 'detector' , 'load': datasource/'parameters/detector_nprotons_correction.yaml'})
+        load_parameters({'path': 'detector' , 'load': datasource/'parameters/detector_eres.yaml'})
 
-        storage ^= load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_e_per_fission.yaml'})
-        storage ^= load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_thermal_power_nominal.yaml' , 'replicate': list_reactors })
-        storage ^= load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_snf.yaml' , 'replicate': list_reactors })
-        storage ^= load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_offequilibrium_correction.yaml' , 'replicate': list_reactors_isotopes })
-        storage ^= load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_fission_fraction_scale.yaml' , 'replicate': list_reactors , 'replica_key_offset': 1 })
+        load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_e_per_fission.yaml'})
+        load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_thermal_power_nominal.yaml' , 'replicate': list_reactors })
+        load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_snf.yaml' , 'replicate': list_reactors })
+        load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_offequilibrium_correction.yaml' , 'replicate': list_reactors_isotopes })
+        load_parameters({'path': 'reactor' , 'load': datasource/'parameters/reactor_fission_fraction_scale.yaml' , 'replicate': list_reactors , 'replica_key_offset': 1 })
 
         # Create Nuisance parameters
         nuisanceall = Sum('nuisance total')
@@ -103,7 +103,7 @@ def model_dayabay_v0():
     storage.to_datax('output/dayabay_v0_data.tex')
 
     from dagflow.graphviz import GraphDot
-    GraphDot.from_graph(g, show='all').savegraph("output/dayabay_v0.dot")
+    GraphDot.from_graph(graph, show='all').savegraph("output/dayabay_v0.dot")
     GraphDot.from_node(storage['parameter_node.constrained.reactor.fission_fraction_scale.DB1'].constraint._norm_node, show='all', minsize=2).savegraph("output/dayabay_v0_large.dot")
     GraphDot.from_node(storage['stat.nuisance.all'], show='all', mindepth=-1, no_forward=True).savegraph("output/dayabay_v0_nuisance.dot")
     GraphDot.from_output(storage['outputs.edges.energy_evis'], show='all', mindepth=-3, no_forward=True).savegraph("output/dayabay_v0_top.dot")
diff --git a/models/dayabay_v1.py b/models/dayabay_v1.py
index eeb37080c6da75150fb039e16c7ef08dca2dcef2..11398aa2b30fc565d92901b748615a13be84e9fc 100644
--- a/models/dayabay_v1.py
+++ b/models/dayabay_v1.py
@@ -7,10 +7,10 @@ from dagflow.lib.arithmetic import Sum
 
 from gindex import GNIndex
 
-from model_tools.parameters_storage import ParametersStorage
+from dagflow.storage import NodeStorage
 
 def model_dayabay_v1():
-    storage = ParametersStorage({}, sep='.')
+    storage = NodeStorage({}, sep='.')
     datasource = Path('data/dayabay-v1')
 
     index = GNIndex.from_dict({