From a65f1e2287d6807f0fdb7fbf7bdea59e60715806 Mon Sep 17 00:00:00 2001
From: Inga Ulusoy
Date: Mon, 27 Oct 2025 09:59:03 +0100
Subject: [PATCH] Apply suggestions from code review

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
 ammico/image_summary.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ammico/image_summary.py b/ammico/image_summary.py
index 7d462f2..16aeea5 100644
--- a/ammico/image_summary.py
+++ b/ammico/image_summary.py
@@ -289,7 +289,7 @@ class ImageSummaryDetector(AnalysisMethod):
         max_new_tokens = self.token_prompt_config[
             "concise" if is_concise_summary else "default"
         ]["summary"]["max_new_tokens"]
-        inputs = self._prepare_inputs(prompt, entry)
+        inputs = self._prepare_inputs([prompt], entry)

         gen_conf = GenerationConfig(
             max_new_tokens=max_new_tokens,
@@ -384,10 +384,10 @@ class ImageSummaryDetector(AnalysisMethod):
         """
         prompt = self.token_prompt_config[
             "concise" if is_concise_answer else "default"
-        ]["answer"]["prompt"]
+        ]["questions"]["prompt"]
         max_new_tokens = self.token_prompt_config[
             "concise" if is_concise_answer else "default"
-        ]["answer"]["max_new_tokens"]
+        ]["questions"]["max_new_tokens"]
         list_of_questions = self._clean_list_of_questions(list_of_questions, prompt)

         gen_conf = GenerationConfig(max_new_tokens=max_new_tokens, do_sample=False)