diff --git a/llm_eval/handler.py b/llm_eval/handler.py
index bcb4bd9..9d9737b 100644
--- a/llm_eval/handler.py
+++ b/llm_eval/handler.py
@@ -74,7 +74,8 @@ def post_process_output(self, prompt, output):
         if self.current_model == 'meta-llama/Llama-2-7b-chat-hf':
             output = output[len(prompt)-1:]
         pattern = re.compile(r'\{\s*"(.+?)"\s*:\s*"(.+?)"\s*\}')
-        match = re.search(pattern, output)
+        matches = list(re.finditer(pattern, output))
+        match = matches[-1] if matches else None
         return {match.group(1): match.group(2)} if match else output
 
     def prepare_output(self):
diff --git a/pyproject.toml b/pyproject.toml
index 9578268..ab4f832 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llm-eval"
-version = "0.5.9"
+version = "0.5.10"
 authors = [
     {name = "Jonathan Eisenzopf", email = "jonathan.eisenzopf@talkmap.com"},
 ]