
Commit 72d85c9

Merge pull request #26 from stanford-oval/Yucheng-Jiang-patch-modify-eval-path

update evaluation data path

Yucheng-Jiang committed Apr 23, 2024
2 parents 195aa99 + 7cdcc36
Showing 2 changed files with 4 additions and 4 deletions.
eval/eval_article_quality.py (1 addition, 1 deletion)
@@ -170,7 +170,7 @@ def main(args):
                         help='Directory to store the evaluation results. '
                              'Each article evaluation will be saved as separate file named after {topic_name}.json')
     parser.add_argument('--pred-file-name', help='Name of the article file.')
-    parser.add_argument("--prompt-template-path", default="./prompts/eval_prometheus_no_ref.prompt",
+    parser.add_argument("--prompt-template-path", default="./eval_prometheus_no_ref.prompt",
                         help='path to evaluation prometheus prompt template')
     parser.add_argument("--rubric-path", default="./eval_rubric_5.json", help='path to rubric json file')

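With the prompts/ prefix dropped, the template default now sits in the same directory as the --rubric-path default in this file. Both are plain relative paths, so they resolve against the directory the script is launched from; a tiny illustration of that resolution (assumed usage, not repository code):

    from pathlib import Path

    # Relative defaults such as "./eval_prometheus_no_ref.prompt" resolve against
    # the current working directory, so the script is expected to be run from the
    # directory containing the prompt and rubric files (assumed to be eval/).
    print(Path("./eval_prometheus_no_ref.prompt").resolve())
    print(Path("./eval_rubric_5.json").resolve())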
eval/evaluation_prometheus.py (3 additions, 3 deletions)
@@ -85,8 +85,8 @@ def get_grading_dict(responses,
                      topic,
                      tokenizer,
                      model,
-                     prompt_template_path="./prompts/eval_prometheus_no_ref.prompt",
-                     rubric_path="./prompts/eval_rubric_5.json",
+                     prompt_template_path="./eval_prometheus_no_ref.prompt",
+                     rubric_path="./eval_rubric_5.json",
                      disable_sample=False,
                      temperature=0.01,
                      top_p=0.95,
@@ -165,7 +165,7 @@ def main(args):
     parser.add_argument('-o', '--output_path', required=True, help='Path to save the output JSON file')
     parser.add_argument('-t', "--topic", required=True, help="Topic of the script your going to analyze")

-    parser.add_argument("--prompt_template_path", default="./prompts/eval_prometheus_no_ref.prompt",
+    parser.add_argument("--prompt_template_path", default="./eval_prometheus_no_ref.prompt",
                         help='path to evaluation prometheus prompt template')
     parser.add_argument("--rubric_path", default="./prompts/eval_rubric_5.json", help='path to rubric json file')

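A minimal sketch of how the updated flags might be consumed downstream, assuming the .prompt file is plain text and the rubric file is JSON; the flag names and defaults come from the diff above, everything else is illustrative:

    import argparse
    import json

    parser = argparse.ArgumentParser()
    # Defaults after this commit: the files are expected next to the script,
    # not under a prompts/ subdirectory.
    parser.add_argument("--prompt_template_path", default="./eval_prometheus_no_ref.prompt",
                        help='path to evaluation prometheus prompt template')
    parser.add_argument("--rubric_path", default="./eval_rubric_5.json",
                        help='path to rubric json file')
    args = parser.parse_args([])  # empty argv, so the new defaults are used

    # Assumed file formats: the .prompt file is read as raw text, the rubric as JSON.
    with open(args.prompt_template_path) as f:
        prompt_template = f.read()
    with open(args.rubric_path) as f:
        rubric = json.load(f)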
