Skip to content

Commit

Permalink
Improve error messages and formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
J535D165 committed Jun 18, 2023
1 parent 413fd34 commit 8272698
Show file tree
Hide file tree
Showing 14 changed files with 126 additions and 128 deletions.
12 changes: 8 additions & 4 deletions asreviewcontrib/makita/entrypoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def get_template_fp(name):


def is_valid_template(fp):
    """Return True if *fp* points to an existing template file.

    Args:
        fp: Path-like location of the template, or a falsy value
            (None / empty string) when no template was resolved.

    Returns:
        bool: True when the file exists.

    Raises:
        ValueError: if *fp* is falsy or the file does not exist.
    """
    # Check truthiness first: Path(None) would raise a confusing
    # TypeError instead of the intended "Template ... not found" error.
    if fp and Path(fp).is_file():
        return True
    else:
        raise ValueError(f"Template {fp} not found")
Expand Down Expand Up @@ -53,9 +53,11 @@ def execute(self, argv): # noqa: C901
self._template(args_name, args_program)
except Exception as err:
print(f"\u001b[31mERROR: {err}\u001b[0m")

elif args_program.tool == "add-script":
self._add_script(args_name, args_program)
try:
self._add_script(args_name, args_program)
except Exception as err:
print(f"\u001b[31mERROR: {err}\u001b[0m")
else:
parser = _parse_arguments_program(self.version, add_help=True)
parser.parse_args(argv)
Expand Down Expand Up @@ -98,7 +100,9 @@ def _template(self, args_name, args_program):
args = parser.parse_args(args_name)

# check if a custom template is used, otherwise use the default template
fp_template = args.template or get_template_fp(args_template.name)
fp_template = args.template or (
args_template.name and get_template_fp(args_template.name)
)
is_valid_template(fp_template)

# load datasets
Expand Down
12 changes: 3 additions & 9 deletions asreviewcontrib/makita/template_arfi.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,17 +53,15 @@ def render_jobs_arfi(
}
)

# Instantiate a ConfigTemplate object, initializing a Jinja2 environment and
# Instantiate a ConfigTemplate object, initializing a Jinja2 environment and
# setting up template variables and extensions.
template = ConfigTemplate(fp_template)

# render scripts
if template.scripts is not None:
for s in template.scripts:
t_script = file_handler.render_file_from_template(
s,
"script",
output_folder=output_folder
s, "script", output_folder=output_folder
)
export_fp = Path(scripts_folder, s)
file_handler.add_file(t_script, export_fp)
Expand All @@ -75,9 +73,7 @@ def render_jobs_arfi(
s,
"doc",
datasets=datasets,
template_name=template.name
if template.name == "ARFI"
else "custom",
template_name=template.name if template.name == "ARFI" else "custom",
template_name_long=template.name_long,
template_scripts=template.scripts,
output_folder=output_folder,
Expand All @@ -101,7 +97,6 @@ def render_jobs_arfi(
)



def _get_priors(dataset, init_seed, n_priors):
"""Sample priors."""
asdata = ASReviewData.from_file(dataset)
Expand All @@ -127,4 +122,3 @@ def _get_priors(dataset, init_seed, n_priors):
priors.append(list(map(str, priors_list)))

return priors

10 changes: 3 additions & 7 deletions asreviewcontrib/makita/template_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,17 +51,15 @@ def render_jobs_basic(
}
)

# Instantiate a ConfigTemplate object, initializing a Jinja2 environment and
# Instantiate a ConfigTemplate object, initializing a Jinja2 environment and
# setting up template variables and extensions.
template = ConfigTemplate(fp_template)

# render scripts
if template.scripts is not None:
for s in template.scripts:
t_script = file_handler.render_file_from_template(
s,
"script",
output_folder=output_folder
s, "script", output_folder=output_folder
)
export_fp = Path(scripts_folder, s)
file_handler.add_file(t_script, export_fp)
Expand All @@ -73,9 +71,7 @@ def render_jobs_basic(
s,
"doc",
datasets=datasets,
template_name=template.name
if template.name == "basic"
else "custom",
template_name=template.name if template.name == "basic" else "custom",
template_name_long=template.name_long,
template_scripts=template.scripts,
output_folder=output_folder,
Expand Down
6 changes: 2 additions & 4 deletions asreviewcontrib/makita/template_multiple_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,17 +62,15 @@ def render_jobs_multiple_models(
}
)

# Instantiate a ConfigTemplate object, initializing a Jinja2 environment and
# Instantiate a ConfigTemplate object, initializing a Jinja2 environment and
# setting up template variables and extensions.
template = ConfigTemplate(fp_template)

# render scripts
if template.scripts is not None:
for s in template.scripts:
t_script = file_handler.render_file_from_template(
s,
"script",
output_folder=output_folder
s, "script", output_folder=output_folder
)
export_fp = Path(scripts_folder, s)
file_handler.add_file(t_script, export_fp)
Expand Down
4 changes: 2 additions & 2 deletions asreviewcontrib/makita/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

class FileHandler:
"""
The FileHandler class handles file operations such as adding files and rendering
The FileHandler class handles file operations such as adding files and rendering
scripts.
"""

Expand All @@ -22,7 +22,7 @@ def add_file(self, content, export_fp):
Args:
content (str): The content to be written into the file.
export_fp (Path): A Path object that specifies the directory where the file
export_fp (Path): A Path object that specifies the directory where the file
should be added.
"""

Expand Down
13 changes: 7 additions & 6 deletions examples/arfi_example/scripts/merge_descriptives.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,31 +35,32 @@ def create_table_descriptives(datasets):

for ds in datasets:
with open(ds) as f:
data = json.load(f)['data']['items']
data = json.load(f)["data"]["items"]
values = {}
for item in data:
values[item['id']] = item['value']
values[item["id"]] = item["value"]
stats.append(values)

df = pd.DataFrame(stats, index=[Path(ds).name for ds in datasets])
return df


if __name__ == "__main__":

parser = argparse.ArgumentParser(
description="Merge descriptives of multiple files into single table."
)
parser.add_argument(
"-s",
type=str,
default="output/simulation/*/descriptives/",
help="Datasets location")
help="Datasets location",
)
parser.add_argument(
"-o",
type=str,
default="output/tables/data_descriptives_all.csv",
help="Output table location")
help="Output table location",
)
args = parser.parse_args()

# load datasets
Expand All @@ -75,4 +76,4 @@ def create_table_descriptives(datasets):
# store result in output folder
Path(args.o).parent.mkdir(parents=True, exist_ok=True)
result.to_csv(Path(args.o))
result.to_excel(Path(args.o).with_suffix('.xlsx'))
result.to_excel(Path(args.o).with_suffix(".xlsx"))
26 changes: 12 additions & 14 deletions examples/arfi_example/scripts/merge_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,38 +33,36 @@ def create_table_state_metrics(metric_files):

for metric in metric_files:
with open(metric) as f:
data = json.load(f)['data']['items']
data = json.load(f)["data"]["items"]
values = {}
values['file_name'] = Path(metric).name
values["file_name"] = Path(metric).name
for item in data:
if item['id'] == 'td':
if item["id"] == "td":
continue
# check if value is a list
if item['value'] is not None and isinstance(item['value'], list):
for value in item['value']:
values[item['id'] + "_" + str(value[0])] = value[1]
if item["value"] is not None and isinstance(item["value"], list):
for value in item["value"]:
values[item["id"] + "_" + str(value[0])] = value[1]
else:
values[item['id']] = item['value']
values[item["id"]] = item["value"]
metrics.append(values)

return pd.DataFrame(metrics)


if __name__ == "__main__":

parser = argparse.ArgumentParser(
description="Merge metrics of multiple states into single table."
)
parser.add_argument(
"-s",
type=str,
default="output/simulation/*/metrics/",
help="states location")
"-s", type=str, default="output/simulation/*/metrics/", help="states location"
)
parser.add_argument(
"-o",
type=str,
default="output/tables/metrics_sim_all.csv",
help="Output table location")
help="Output table location",
)
args = parser.parse_args()

# load metric files
Expand All @@ -80,4 +78,4 @@ def create_table_state_metrics(metric_files):
# store result in output folder
Path(args.o).parent.mkdir(parents=True, exist_ok=True)
result.to_csv(Path(args.o))
result.to_excel(Path(args.o).with_suffix('.xlsx'))
result.to_excel(Path(args.o).with_suffix(".xlsx"))
31 changes: 17 additions & 14 deletions examples/arfi_example/scripts/merge_tds.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,17 +35,21 @@ def create_table_state_tds(metrics):

for metric in metrics:
with open(metric) as f:
i = next(filter(lambda x: x['id'] == 'td', json.load(f)['data']['items']))['value'] # noqa
i = next(filter(lambda x: x["id"] == "td", json.load(f)["data"]["items"]))[
"value"
] # noqa
values.extend((item[0], item[1], file_counter) for item in i)
file_counter += 1

df = pd.DataFrame(values, columns=['record_id', 'td', 'metric_file'])
pivoted = df.pivot_table(index='record_id',
columns='metric_file',
values='td',
aggfunc='first',
fill_value=0)
pivoted.columns = [f'td_sim_{col}' for col in pivoted.columns]
df = pd.DataFrame(values, columns=["record_id", "td", "metric_file"])
pivoted = df.pivot_table(
index="record_id",
columns="metric_file",
values="td",
aggfunc="first",
fill_value=0,
)
pivoted.columns = [f"td_sim_{col}" for col in pivoted.columns]
return pivoted


Expand All @@ -54,15 +58,14 @@ def create_table_state_tds(metrics):
description="Merge tds of multiple metrics into single table."
)
parser.add_argument(
"-s",
type=str,
default="output/simulation/*/metrics/",
help="metrics location")
"-s", type=str, default="output/simulation/*/metrics/", help="metrics location"
)
parser.add_argument(
"-o",
type=str,
default="output/tables/tds_sim_all.csv",
help="Output table location")
help="Output table location",
)
args = parser.parse_args()

# load metric files
Expand All @@ -77,4 +80,4 @@ def create_table_state_tds(metrics):
# store table
Path(args.o).parent.mkdir(parents=True, exist_ok=True)
states_table.to_csv(Path(args.o))
states_table.to_excel(Path(args.o).with_suffix('.xlsx'))
states_table.to_excel(Path(args.o).with_suffix(".xlsx"))
13 changes: 7 additions & 6 deletions examples/basic_example/scripts/merge_descriptives.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,31 +35,32 @@ def create_table_descriptives(datasets):

for ds in datasets:
with open(ds) as f:
data = json.load(f)['data']['items']
data = json.load(f)["data"]["items"]
values = {}
for item in data:
values[item['id']] = item['value']
values[item["id"]] = item["value"]
stats.append(values)

df = pd.DataFrame(stats, index=[Path(ds).name for ds in datasets])
return df


if __name__ == "__main__":

parser = argparse.ArgumentParser(
description="Merge descriptives of multiple files into single table."
)
parser.add_argument(
"-s",
type=str,
default="output/simulation/*/descriptives/",
help="Datasets location")
help="Datasets location",
)
parser.add_argument(
"-o",
type=str,
default="output/tables/data_descriptives_all.csv",
help="Output table location")
help="Output table location",
)
args = parser.parse_args()

# load datasets
Expand All @@ -75,4 +76,4 @@ def create_table_descriptives(datasets):
# store result in output folder
Path(args.o).parent.mkdir(parents=True, exist_ok=True)
result.to_csv(Path(args.o))
result.to_excel(Path(args.o).with_suffix('.xlsx'))
result.to_excel(Path(args.o).with_suffix(".xlsx"))
Loading

0 comments on commit 8272698

Please sign in to comment.