Skip to content

Commit

Permalink
Expose inputs and outputs to data functions. (#64)
Browse files Browse the repository at this point in the history
  • Loading branch information
bbassett-tibco committed May 30, 2024
1 parent e89e881 commit 65ca640
Show file tree
Hide file tree
Showing 2 changed files with 70 additions and 18 deletions.
65 changes: 47 additions & 18 deletions spotfire/data_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,12 +92,27 @@ def __init__(self, name: str, input_type: str, file: str) -> None:
:param input_type: whether the input is a ``table``, a ``column``, or a ``value``
:param file: the filename of the SBDF file that contains the data to read into this input
"""
self.name = name
self.type = input_type
self.file = file
self._name = name
self._type = input_type
self._file = file

def __repr__(self) -> str:
    """Return an unambiguous, eval-style representation of this input.

    Shows the constructor arguments (name, type, file) read from the
    private ``_name``/``_type``/``_file`` attributes.
    """
    # The diff residue kept both the pre-rename (self.name) and post-rename
    # (self._name) return lines; only the committed version is retained.
    return f"{_utils.type_name(type(self))}({self._name!r}, {self._type!r}, {self._file!r})"

@property
def name(self) -> str:
    """Read-only accessor for this input's name, as supplied at construction."""
    return self._name

@property
def type(self) -> str:
    """Get the type of this input.

    Normally one of ``table``, ``column``, or ``value``.  NOTE(review): the
    ``read`` method also checks for a ``"NULL"`` type (a missing input that
    is assigned ``None``), so callers should not assume the three named
    values are exhaustive.
    """
    return self._type

@property
def file(self) -> str:
    """Read-only accessor for the path of the SBDF file backing this input."""
    return self._file

def read(self, globals_dict: _Globals, debug_fn: _LogFunction) -> None:
"""Read an input from the corresponding SBDF file into the dict that comprises the set of globals.
Expand All @@ -107,12 +122,12 @@ def read(self, globals_dict: _Globals, debug_fn: _LogFunction) -> None:
"""
# pylint: disable=too-many-branches

if self.type == "NULL":
debug_fn(f"assigning missing '{self.name}' as None")
globals_dict[self.name] = None
if self._type == "NULL":
debug_fn(f"assigning missing '{self._name}' as None")
globals_dict[self._name] = None
return
debug_fn(f"assigning {self.type} '{self.name}' from file {self.file}")
dataframe = sbdf.import_data(self.file)
debug_fn(f"assigning {self._type} '{self._name}' from file {self._file}")
dataframe = sbdf.import_data(self._file)
debug_fn(f"read {dataframe.shape[0]} rows {dataframe.shape[1]} columns")

# Table metadata
Expand Down Expand Up @@ -149,9 +164,9 @@ def read(self, globals_dict: _Globals, debug_fn: _LogFunction) -> None:
debug_fn(f"column metadata:{column_meta}")

# Argument type
if self.type == "column":
if self._type == "column":
dataframe = dataframe[dataframe.columns[0]]
if self.type == "value":
if self._type == "value":
value = dataframe.at[0, dataframe.columns[0]]
if type(value).__module__ == "numpy":
dataframe = value.tolist()
Expand All @@ -165,7 +180,7 @@ def read(self, globals_dict: _Globals, debug_fn: _LogFunction) -> None:
dataframe = value

# Store to global dict
globals_dict[self.name] = dataframe
globals_dict[self._name] = dataframe


class AnalyticOutput:
Expand All @@ -177,20 +192,30 @@ def __init__(self, name: str, file: str) -> None:
:param name: the name of the output
:param file: the filename of the SBDF file that will be created by writing from this output
"""
self.name = name
self.file = file
self._name = name
self._file = file

def __repr__(self) -> str:
    """Return an unambiguous, eval-style representation of this output.

    Shows the constructor arguments (name, file) read from the private
    ``_name``/``_file`` attributes.
    """
    # The diff residue kept both the pre-rename (self.name) and post-rename
    # (self._name) return lines; only the committed version is retained.
    return f"{_utils.type_name(type(self))}({self._name!r}, {self._file!r})"

@property
def name(self) -> str:
    """Read-only accessor for this output's name, as supplied at construction."""
    return self._name

@property
def file(self) -> str:
    """Read-only accessor for the path of the SBDF file this output serializes to."""
    return self._file

def write(self, globals_dict: _Globals, debug_fn: _LogFunction) -> None:
    """Write an output from the dict that comprises the set of globals into the
    corresponding SBDF file.

    :param globals_dict: dict containing the global variables from the data function
    :param debug_fn: logging function for debug messages
    """
    # Diff residue kept both the pre-rename (self.name/self.file) and
    # post-rename statement pairs; only the committed version is retained.
    debug_fn(f"returning '{self._name}' as file {self._file}")
    # default_column_name ensures scalar/column outputs get a sensible header.
    sbdf.export_data(globals_dict[self._name], self._file, default_column_name=self._name)


class AnalyticResult:
Expand Down Expand Up @@ -256,7 +281,11 @@ def __init__(self, analytic_type: str, inputs: list[AnalyticInput], outputs: lis
self.script = script
self.debug_enabled = False
self.script_filename = '<data_function>'
self.globals = {'__builtins__': __builtins__}
self.globals = {
'__builtins__': __builtins__,
'__spotfire_inputs__': tuple(inputs),
'__spotfire_outputs__': tuple(outputs),
}
self.log = io.StringIO()
self.compiled_script = None

Expand Down
23 changes: 23 additions & 0 deletions spotfire/test/test_data_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -397,3 +397,26 @@ def test_script_filename(self):
expected = _PythonVersionedExpectedValue("script_filename")
self._run_analytic("raise ValueError('nope')", {}, {}, False, expected,
spec_adjust=lambda x: self._script_filename(x, "subdir/value_error.py"))

def test_spotfire_inputs_dunder(self):
    """Test that the ``__spotfire_inputs__`` object works"""
    # The analytic script below reads the name/type of each declared input
    # and echoes them back as a table; verify they round-trip correctly.
    input_frame = pd.DataFrame({"a": [1, 2, 3, 4, 5]})
    expected = pd.DataFrame({"names": ["in1", "in2", "in3"], "types": ["table", "column", "value"]})
    script = """import pandas as pd
names = []
types = []
for i in __spotfire_inputs__:
    names.append(i.name)
    types.append(i.type)
out = pd.DataFrame({'names': names, 'types': types})"""
    analytic_inputs = {"in1": input_frame, "in2": [1, 2, 3, 4, 5], "in3": 0}
    self._run_analytic(script, analytic_inputs, {"out": expected}, True, None)

def test_spotfire_outputs_dunder(self):
    """Test that the ``__spotfire_outputs__`` object works"""
    # Output 'a' lists the declared output names; b-e are filler values so
    # that __spotfire_outputs__ contains multiple entries to enumerate.
    expected = {"a": pd.DataFrame({"a": ["a", "b", "c", "d", "e"]})}
    for col in ("b", "c", "d", "e"):
        expected[col] = pd.DataFrame({col: ["x"]})
    script = """a = [x.name for x in __spotfire_outputs__]
b = c = d = e = 'x'"""
    self._run_analytic(script, {}, expected, True, None)

0 comments on commit 65ca640

Please sign in to comment.