From b3edc621f6efc727e79390e71d0a8a2eb0fa9238 Mon Sep 17 00:00:00 2001
From: Oleksandr Kozachuk
Date: Fri, 24 Mar 2023 17:28:56 +0100
Subject: [PATCH] Add an optional fix layer to the answer.

---
 fjerkroa_bot/ai_responder.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/fjerkroa_bot/ai_responder.py b/fjerkroa_bot/ai_responder.py
index dc2ebd4..4407e4a 100644
--- a/fjerkroa_bot/ai_responder.py
+++ b/fjerkroa_bot/ai_responder.py
@@ -118,6 +118,21 @@ class AIResponder(object):
             logging.warning(f"failed to generate response: {repr(err)}")
             return None, limit
 
+    async def fix(self, answer: str) -> str:
+        if 'fix-model' not in self.config:
+            return answer
+        messages = [{"role": "system", "content": self.config["fix-description"]},
+                    {"role": "user", "content": answer}]
+        for _ in range(4):
+            try:
+                result = await openai.ChatCompletion.acreate(model=self.config["fix-model"],
+                                                             messages=messages,
+                                                             temperature=0.2)
+                return result['choices'][0]['message']['content']
+            except Exception as err:
+                logging.warning(f"failed to execute a fix for the answer: {repr(err)}")
+        return answer
+
     async def send(self, message: AIMessage) -> AIResponse:
         limit = self.config["history-limit"]
         if self.short_path(message, limit):
@@ -128,6 +143,7 @@ class AIResponder(object):
             answer, limit = await self._acreate(messages, limit)
             if answer is None:
                 continue
+            answer['content'] = await self.fix(answer['content'])
             try:
                 response = json.loads(answer['content'])
             except Exception as err: