From af81f458491d313e636b94e464c70c23021ce9b7 Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Wed, 29 May 2024 00:59:18 -0700
Subject: [PATCH] a

---
 llm_eval/handler.py | 2 +-
 pyproject.toml      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/llm_eval/handler.py b/llm_eval/handler.py
index 9d9737b..51ddcb7 100644
--- a/llm_eval/handler.py
+++ b/llm_eval/handler.py
@@ -74,7 +74,7 @@ def post_process_output(self, prompt, output):
         if self.current_model == 'meta-llama/Llama-2-7b-chat-hf':
             output = output[len(prompt)-1:]
         pattern = re.compile(r'\{\s*"(.+?)"\s*:\s*"(.+?)"\s*\}')
-        match = re.findall(pattern, "target_text")[-1]
+        match = re.findall(pattern, output)[-1]
         return {match.group(1): match.group(2)} if match else output
 
     def prepare_output(self):
diff --git a/pyproject.toml b/pyproject.toml
index ab4f832..45ed69f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llm-eval"
-version = "0.5.10"
+version = "0.5.11"
 authors = [
     {name = "Jonathan Eisenzopf", email = "jonathan.eisenzopf@talkmap.com"},
 ]
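
For reference, a minimal standalone sketch of the behavior the patched line is after: the old code ran the compiled pattern against the literal string "target_text", which can never match, while the new line scans the actual model output for the last {"key": "value"} pair. The extract_last_pair helper below is hypothetical (not part of the repository) and simply illustrates that re.findall with a two-group pattern yields (key, value) tuples:

    import re

    # Same pattern as in llm_eval/handler.py: matches one {"key": "value"} object.
    pattern = re.compile(r'\{\s*"(.+?)"\s*:\s*"(.+?)"\s*\}')

    def extract_last_pair(output: str):
        """Return the last {"key": "value"} pair found in the model output,
        or the raw output unchanged if no pair is present."""
        matches = pattern.findall(output)      # list of (key, value) tuples
        if not matches:
            return output
        key, value = matches[-1]
        return {key: value}

    # The sentinel string the old code scanned never matches; real output does.
    print(pattern.findall("target_text"))                        # []
    print(extract_last_pair('noise {"answer": "42"} trailing'))  # {'answer': '42'}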