Add an optional fix layer to the answer.
This commit is contained in:
parent
592eb0cb2c
commit
b3edc621f6
@ -118,6 +118,21 @@ class AIResponder(object):
|
|||||||
logging.warning(f"failed to generate response: {repr(err)}")
|
logging.warning(f"failed to generate response: {repr(err)}")
|
||||||
return None, limit
|
return None, limit
|
||||||
|
|
||||||
|
async def fix(self, answer: str) -> str:
    """Optionally run the generated answer through a secondary "fix" model.

    If no ``fix-model`` is configured, the answer is returned unchanged.
    Otherwise the answer is sent to the configured chat-completion model
    (with ``fix-description`` as the system prompt) and the model's reply
    replaces the answer. Up to 4 attempts are made; on repeated failure
    the original answer is returned as a best-effort fallback.

    :param answer: the raw answer text to post-process.
    :return: the fixed answer, or the original answer if fixing is
        disabled or all attempts fail.
    """
    # Fixing is an optional feature: absent config key means "disabled".
    if 'fix-model' not in self.config:
        return answer

    messages = [{"role": "system", "content": self.config["fix-description"]},
                {"role": "user", "content": answer}]

    # Retry a few times: the completion API can fail transiently
    # (rate limits, timeouts).
    for _ in range(4):
        try:
            result = await openai.ChatCompletion.acreate(model=self.config["fix-model"],
                                                         messages=messages,
                                                         temperature=0.2)
            # BUG FIX: the response key is 'choices', not 'chaices'.
            # The typo raised KeyError on every successful call, so the
            # loop always exhausted and the fix silently never applied.
            return result['choices'][0]['message']['content']
        except Exception as err:
            logging.warning(f"failed to execute a fix for the answer: {repr(err)}")

    # All attempts failed: degrade gracefully to the unfixed answer.
    return answer
|
||||||
|
|
||||||
async def send(self, message: AIMessage) -> AIResponse:
|
async def send(self, message: AIMessage) -> AIResponse:
|
||||||
limit = self.config["history-limit"]
|
limit = self.config["history-limit"]
|
||||||
if self.short_path(message, limit):
|
if self.short_path(message, limit):
|
||||||
@ -128,6 +143,7 @@ class AIResponder(object):
|
|||||||
answer, limit = await self._acreate(messages, limit)
|
answer, limit = await self._acreate(messages, limit)
|
||||||
if answer is None:
|
if answer is None:
|
||||||
continue
|
continue
|
||||||
|
answer['content'] = await self.fix(answer['content'])
|
||||||
try:
|
try:
|
||||||
response = json.loads(answer['content'])
|
response = json.loads(answer['content'])
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user