from openai import AsyncClient  # in openai>=1.x AsyncClient is an alias of AsyncOpenAI
from aiogram import types

# db, gpt and get_random_token are project-local helpers (not shown here).


async def ask(question, api_key, old):
    client = AsyncClient(api_key=api_key)
    try:
        stream = await client.chat.completions.create(
            model="gpt-3.5-turbo-1106",
            messages=old + [{"role": "user", "content": question}],
            stream=True,
        )
        return {'stream': stream, 'client': client}
    except Exception as e:
        print(e)
        await client.close()
        # The key has run out of quota: drop it so it is not picked again.
        if 'Error code: 429' in str(e):
            db.delete_token_from_file(api_key)
        return 0


async def get_gpt_stream_answer(user_id, promt, old, i, message_to_edit: types.Message):
    print("Question sent")
    # Give up after three failed attempts.
    if i >= 3:
        print(f"{user_id} got no answer")
        db.clear_chat(user_id)
        return 0
    # Keep only the last 10 messages of the dialogue history.
    if len(old) > 10:
        old = old[-10:]
    token = get_random_token()
    data = await gpt.ask(promt, token, old)
    if data == 0:
        print(f"{user_id} attempt #{i}")
        return await get_gpt_stream_answer(user_id, promt, old, i + 1, message_to_edit)
    try:
        response_text = ""
        speed = 100          # edit the Telegram message once per 100 chunks
        chunk_count = 0      # separate counter, do not reuse the retry counter `i`
        async for chunk in data['stream']:
            response_text += chunk.choices[0].delta.content or ""
            if chunk_count >= speed:
                try:
                    await message_to_edit.edit_text(text=response_text)
                except Exception as e:
                    # Telegram message length limit reached: stop appending text.
                    if 'Message_too_long' in str(e):
                        break
                chunk_count = 0
            chunk_count += 1
        await message_to_edit.edit_text(text=response_text)
        db.insert_new_message(user_id, 'user', promt, None)
        db.insert_new_message(user_id, 'assistant', response_text, None)
        return response_text
    except Exception as e:
        print(e)
        db.clear_chat(user_id)
        db.plus_error(token)
        db.delete_token_if(token)
        return 0
    finally:
        if 'client' in data:
            await data['client'].close()
These are two methods a Telegram bot uses to talk to GPT-3.5 Turbo. There is a memory leak caused by openai's _response method. I have been looking for the cause for a week and still cannot find it.
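Not a fix for the leak itself, but one thing worth ruling out: when the `async for` loop over the stream exits early (the `Message_too_long` break, or an exception), only the client is closed in `finally`, while the HTTP response backing the stream is never explicitly closed. Below is a minimal sketch of a cleanup helper, assuming an openai 1.x build where `AsyncStream` keeps the underlying `httpx.Response` in `stream.response`; `close_gpt_stream` is a hypothetical name, not part of either library.

async def close_gpt_stream(data):
    """Best-effort cleanup for the dict returned by ask().

    Assumption: openai 1.x AsyncStream exposes the underlying httpx.Response
    as `stream.response`; closing it releases the connection and buffers even
    when the `async for` loop was abandoned halfway.
    """
    stream = data.get('stream')
    if stream is not None:
        try:
            await stream.response.aclose()
        except Exception as cleanup_error:
            print(cleanup_error)
    client = data.get('client')
    if client is not None:
        await client.close()

With this helper, the `finally` block in get_gpt_stream_answer would call `await close_gpt_stream(data)` instead of closing only the client; if your openai version already provides `await data['stream'].close()`, that call can be used in its place.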