add asyncio

This commit is contained in:
bridge
2025-09-02 22:16:03 +08:00
parent 0f9f5f35f7
commit bd28da21f5
7 changed files with 90 additions and 28 deletions

View File

@@ -2,6 +2,7 @@ from litellm import completion
from langchain.prompts import PromptTemplate
from pathlib import Path
import json
import asyncio
from src.utils.config import CONFIG
from src.utils.io import read_txt
@@ -36,6 +37,18 @@ def call_llm(prompt: str) -> str:
# 返回生成的内容
return response.choices[0].message.content
async def call_llm_async(prompt: str) -> str:
    """
    Call the LLM asynchronously.

    Runs the synchronous ``call_llm`` in a worker thread so the event
    loop is not blocked while waiting for the completion.

    Args:
        prompt: The prompt text to send to the model.

    Returns:
        str: The raw text returned by the LLM.
    """
    # Delegate the blocking call to a thread via asyncio.to_thread.
    result = await asyncio.to_thread(call_llm, prompt)
    return result
def get_prompt_and_call_llm(template_path: Path, infos: dict) -> str:
"""
根据模板获取提示词并调用LLM
@@ -48,9 +61,28 @@ def get_prompt_and_call_llm(template_path: Path, infos: dict) -> str:
# print(f"res = {res}")
return json_res
async def get_prompt_and_call_llm_async(template_path: Path, infos: dict) -> dict:
    """
    Async version: build a prompt from a template file and call the LLM.

    Args:
        template_path: Path to the prompt template file.
        infos: Values used to fill the template placeholders.

    Returns:
        dict: The LLM response parsed as JSON.

    Raises:
        json.JSONDecodeError: If the LLM response is not valid JSON.
    """
    template = read_txt(template_path)
    prompt = get_prompt(template, infos)
    res = await call_llm_async(prompt)
    # Log before parsing so the raw response is visible even when
    # json.loads fails on a malformed reply.
    print(f"prompt = {prompt}")
    print(f"res = {res}")
    json_res = json.loads(res)
    return json_res
def get_ai_prompt_and_call_llm(infos: dict) -> dict:
    """
    Build the prompt from the "ai.txt" template and call the LLM.

    Args:
        infos: Values used to fill the template placeholders.

    Returns:
        dict: The LLM response parsed as JSON.
    """
    template_path = CONFIG.paths.templates / "ai.txt"
    # Bug fix: the original had this return statement duplicated;
    # the second copy was unreachable dead code and is removed.
    return get_prompt_and_call_llm(template_path, infos)
async def get_ai_prompt_and_call_llm_async(infos: dict) -> dict:
    """
    Async version: build the prompt from the "ai.txt" template and call the LLM.

    Args:
        infos: Values used to fill the template placeholders.

    Returns:
        dict: The LLM response parsed as JSON.
    """
    # Resolve the template path under the configured templates directory.
    ai_template = CONFIG.paths.templates / "ai.txt"
    result = await get_prompt_and_call_llm_async(ai_template, infos)
    return result