Mirror of https://github.com/fofolee/uTools-quickcommand.git (synced 2025-06-07 13:34:08 +08:00)
Fix a bug where reasoning_content was not correctly recognized when using the OpenAI API
This commit is contained in:
parent d4e58e58be
commit 55516159ba
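The diff below routes every streamed chunk through a shared processContentWithReason helper so that reasoning_content from OpenAI-compatible deltas is no longer dropped. Only the helper's opening lines appear in the hunks, so the sketch below fills in a plausible body; the <think> markers and the content branch are assumptions rather than the repository's exact code.

// A minimal sketch of the shared helper, assuming reasoning tokens are wrapped
// in <think> markers before being streamed to the caller. Only the first lines
// of this function appear in the diff; everything past them is an assumption.
let reasoning_content_start = false;

function processContentWithReason(response, onStream) {
  // response is an OpenAI-compatible delta ({ content, reasoning_content })
  // or an Ollama message object with the same optional fields.
  if (response.reasoning_content) {
    if (!reasoning_content_start) {
      reasoning_content_start = true;
      onStream("<think>", false); // assumed marker opening the reasoning block
    }
    onStream(response.reasoning_content, false);
  }
  if (response.content) {
    if (reasoning_content_start) {
      reasoning_content_start = false;
      onStream("</think>", false); // assumed marker closing the reasoning block
    }
    onStream(response.content, false);
  }
}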
@@ -167,37 +167,8 @@ function parseModelsResponse(response, apiType) {
   }
 }
 
-// Handle OpenAI streaming response
-async function handleOpenAIStreamResponse(line, onStream) {
-  if (line.startsWith("data:")) {
-    const jsonStr = line.replace(/^data:[ ]*/, "");
-    if (jsonStr === "[DONE]") {
-      onStream("", true);
-      return;
-    }
-    const json = JSON.parse(jsonStr);
-    const content = json.choices[0]?.delta?.content;
-    if (content) {
-      onStream(content, false);
-    }
-  }
-}
-
-// Handle Ollama streaming response
-async function handleOllamaStreamResponse(line, onStream) {
-  const json = JSON.parse(line);
-  if (json.done) {
-    onStream("", true);
-    return;
-  }
-  if (json.message?.content) {
-    onStream(json.message.content, false);
-  }
-}
-
 let reasoning_content_start = false;
-// Handle uTools AI streaming response
-async function handleUToolsAIStreamResponse(response, onStream) {
+function processContentWithReason(response, onStream) {
   if (response.reasoning_content) {
     if (!reasoning_content_start) {
       reasoning_content_start = true;
@@ -214,6 +185,40 @@ async function handleUToolsAIStreamResponse(response, onStream) {
   }
 }
 
+// Handle OpenAI streaming response
+async function handleOpenAIStreamResponse(line, onStream) {
+  if (line.startsWith("data:")) {
+    const jsonStr = line.replace(/^data:[ ]*/, "");
+    if (jsonStr === "[DONE]") {
+      onStream("", true);
+      return;
+    }
+    const json = JSON.parse(jsonStr);
+    const response = json.choices[0]?.delta;
+    if (response) {
+      processContentWithReason(response, onStream);
+    }
+  }
+}
+
+// Handle Ollama streaming response
+async function handleOllamaStreamResponse(line, onStream) {
+  const json = JSON.parse(line);
+  if (json.done) {
+    onStream("", true);
+    return;
+  }
+  const response = json.message;
+  if (response) {
+    processContentWithReason(response, onStream);
+  }
+}
+
+// Handle uTools AI streaming response
+async function handleUToolsAIStreamResponse(response, onStream) {
+  processContentWithReason(response, onStream);
+}
+
 // Handle streaming responses
 async function handleStreamResponse(response, apiConfig, onStream) {
   // Handle uTools AI response
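All three handlers above are line-oriented: handleOpenAIStreamResponse expects SSE "data:" lines and handleOllamaStreamResponse expects NDJSON lines. The repository's handleStreamResponse, whose body is not part of this diff, is what feeds them; the loop below is only a sketch of that plumbing, and consumeStream, the "openai" apiType value, and the buffering strategy are assumptions.

// Sketch of a reader loop that could drive the line-based handlers above.
// consumeStream is a hypothetical name; the real dispatch lives in
// handleStreamResponse, whose body is not shown in this diff.
async function consumeStream(response, apiType, onStream) {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // Both OpenAI SSE and Ollama NDJSON are newline-delimited.
    const lines = buffer.split("\n");
    buffer = lines.pop(); // keep the trailing partial line for the next chunk
    for (const line of lines) {
      if (!line.trim()) continue;
      if (apiType === "openai") {
        await handleOpenAIStreamResponse(line, onStream);
      } else {
        await handleOllamaStreamResponse(line, onStream);
      }
    }
  }
}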
@@ -403,7 +408,7 @@ async function chat(content, apiConfig, options = {}) {
     API_ENDPOINTS[apiConfig.apiType].chat
   );
   const config = buildRequestConfig(apiConfig);
-  const requestData = (content, apiConfig);
+  const requestData = buildRequestData(content, apiConfig);
 
   const response = await fetch(url, {
     method: "POST",
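The last hunk restores the buildRequestData call when assembling the chat request. buildRequestData itself is repository code that this diff does not show; the sketch below only illustrates the usual shape of an OpenAI-compatible streaming payload, and every field in it is an assumption.

// Hypothetical sketch of a request body for an OpenAI-compatible chat endpoint.
// The repository's buildRequestData may differ; this is not its implementation.
function buildRequestDataSketch(content, apiConfig) {
  return {
    model: apiConfig.model,
    messages: [{ role: "user", content }],
    // stream must be true so the server sends the incremental deltas that
    // handleOpenAIStreamResponse and processContentWithReason consume.
    stream: true,
  };
}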