Move the AI interface to quickcommand and support configuring it globally from the configuration menu

fofolee 2025-02-18 00:59:28 +08:00
parent 3eca3b448e
commit 9e00a08253
13 changed files with 393 additions and 257 deletions
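In short: what used to be quickcomposer.ai.chat(apiConfig, content) becomes quickcommand.askAI(content, apiConfig) (note the swapped argument order), and the API settings can now be saved once through the new AI config dialog instead of being re-entered per command. A minimal before/after sketch based on the type declarations changed below; the token and model values are placeholders:

// Before this commit (declaration removed below)
// const res = await quickcomposer.ai.chat(
//   { modelType: "openai", apiUrl: "https://api.openai.com/v1/chat/completions",
//     apiToken: "your-api-token", model: "gpt-3.5-turbo" },
//   { prompt: "Hello", presetPrompt: "" }
// );

// After this commit: content first, apiConfig second
const res = await quickcommand.askAI(
  { prompt: "Hello", presetPrompt: "" },
  {
    modelType: "openai", // or "ollama"
    apiUrl: "https://api.openai.com/v1/chat/completions",
    apiToken: "your-api-token", // omit for Ollama
    model: "gpt-3.5-turbo",
  }
);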

View File

@ -157,9 +157,9 @@ JS代码注入、脚本执行支持多种语言、函数返回、变量管
Windows-specific features
Window control (always on top, opacity, position), message sending, file system monitoring, process management, registry operations, service management, shortcut management, system settings modification...
## AI Chat
## AI Q&A
Get the list of models supported by the API, AI chat...
AI Q&A: free-form Q&A, translation, summarization, shell code generation...
## 视频处理

View File

@ -51,7 +51,7 @@ const PRESET_PROMPTS = {
* @param {string} content.presetPrompt - Preset prompt type
* @returns {Promise<Object>} Chat response
*/
async function chat(apiConfig, content) {
async function chat(content, apiConfig) {
try {
const { modelType, apiUrl, apiToken, model } = apiConfig;
const { prompt, presetPrompt } = content;

View File

@ -5,6 +5,9 @@ const kill = require("tree-kill");
const iconv = require("iconv-lite");
const path = require("path");
const axios = require("axios");
const { chat, getModels } = require("./ai");
window.getModelsFromAiApi = getModels;
const systemDialog = require("./dialog/service");
@ -184,6 +187,11 @@ const quickcommand = {
}
return null;
},
askAI: async function (content, apiConfig) {
return await chat(content, apiConfig);
},
...systemDialog,
};
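This preload hunk exposes the two renderer-side entry points: quickcommand.askAI for chat requests and window.getModelsFromAiApi for listing models (used by the AIConfig dialog further down). A minimal sketch of the model-list call, mirroring how AIConfig.vue consumes it; the Ollama URL is just the placeholder value used elsewhere in this commit:

// apiToken can be left empty for Ollama
const { success, result } = await window.getModelsFromAiApi({
  modelType: "ollama",
  apiUrl: "http://localhost:11434",
  apiToken: "",
});
const models = success ? result : []; // result holds the model names on success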

View File

@ -13,7 +13,6 @@ const quickcomposer = {
status: require("./quickcomposer/status"),
browser: require("./quickcomposer/browser"),
video: require("./quickcomposer/video"),
ai: require("./quickcomposer/ai"),
};
module.exports = quickcomposer;

View File

@ -1,6 +0,0 @@
const { chat, getModels } = require("./chat");
module.exports = {
chat,
getModels,
};

View File

@ -129,19 +129,19 @@ export default defineComponent({
},
outPlugin() {
// Exiting the RunCode or RunComposer view
if (!["code", "composer"].includes(this.$route.name)) return;
if (["code", "composer"].includes(this.$route.name)) {
let currentCommand = window.lodashM.cloneDeep(
this.commandManager.state.currentCommand
);
let currentCommand = window.lodashM.cloneDeep(
this.commandManager.state.currentCommand
);
if (this.$route.name === "composer") {
currentCommand =
this.commandManager.getLitedComposerCommand(currentCommand);
}
if (this.$route.name === "composer") {
currentCommand =
this.commandManager.getLitedComposerCommand(currentCommand);
dbManager.putDB(currentCommand, `cfg_${this.$route.name}History`);
}
dbManager.putDB(currentCommand, `cfg_${this.$route.name}History`);
this.$router.push("/");
this.saveProfile();
},

View File

@ -0,0 +1,179 @@
<template>
<div>
<div class="q-my-md">
<BorderLabel label="API配置">
<ButtonGroup
:model-value="argvs.apiConfig.modelType"
@update:modelValue="updateArgvs('apiConfig.modelType', $event)"
:options="modelTypeOptions"
height="26px"
class="q-mb-sm"
/>
<VariableInput
:model-value="argvs.apiConfig.apiUrl"
label="API地址"
:placeholder="
argvs.apiConfig.modelType === 'openai'
? '例https://api.openai.com/v1'
: '例http://localhost:11434'
"
@update:modelValue="updateArgvs('apiConfig.apiUrl', $event)"
class="q-mb-sm"
/>
<div class="row q-gutter-sm">
<VariableInput
class="col"
v-if="argvs.apiConfig.modelType === 'openai'"
:model-value="argvs.apiConfig.apiToken"
@update:modelValue="updateArgvs('apiConfig.apiToken', $event)"
label="API密钥"
/>
<VariableInput
class="col"
:model-value="argvs.apiConfig.model"
@update:modelValue="updateArgvs('apiConfig.model', $event)"
label="模型"
:placeholder="
argvs.apiConfig.modelType === 'openai'
? '例gpt-4o'
: '例qwen2.5:32b'
"
/>
</div>
</BorderLabel>
</div>
<ButtonGroup
:model-value="argvs.content.presetPrompt"
@update:modelValue="updateArgvs('content.presetPrompt', $event)"
:options="presetPromptOptions"
height="26px"
class="q-mb-sm"
/>
<VariableInput
:model-value="argvs.content.prompt"
@update:modelValue="updateArgvs('content.prompt', $event)"
label="提示词"
type="textarea"
autogrow
/>
</div>
</template>
<script>
import { defineComponent } from "vue";
import BorderLabel from "components/composer/common/BorderLabel.vue";
import ButtonGroup from "components/composer/common/ButtonGroup.vue";
import { newVarInputVal } from "js/composer/varInputValManager";
import VariableInput from "components/composer/common/VariableInput.vue";
import { parseFunction, stringifyArgv } from "js/composer/formatString";
export default defineComponent({
name: "AskAIEditor",
props: {
modelValue: Object,
},
components: {
VariableInput,
BorderLabel,
ButtonGroup,
},
emits: ["update:modelValue"],
data() {
return {
defaultArgvs: {
content: {
prompt: newVarInputVal("str"),
presetPrompt: "",
},
apiConfig: {},
},
presetPromptOptions: [
{ label: "自由问答", value: "" },
{ label: "翻译", value: "translate" },
{ label: "总结", value: "summarize" },
{ label: "执行shell命令", value: "shell" },
],
modelTypeOptions: [
{ label: "OpenAI", value: "openai" },
{ label: "Ollama", value: "ollama" },
],
};
},
computed: {
argvs() {
return (
this.modelValue.argvs || this.parseCodeToArgvs(this.modelValue.code)
);
},
},
methods: {
parseCodeToArgvs(code) {
const argvs = window.lodashM.cloneDeep(this.defaultArgvs);
if (!code) return argvs;
try {
const variableFormatPaths = [
"arg0.prompt",
"arg1.apiUrl",
"arg1.apiToken",
"arg1.model",
];
const params = parseFunction(code, { variableFormatPaths });
return {
content: params.argvs[0],
apiConfig: params.argvs[1],
};
} catch (e) {
console.error("解析参数失败:", e);
}
return argvs;
},
generateCode(argvs = this.argvs) {
return `${this.modelValue.value}(${stringifyArgv(
argvs.content
)}, ${stringifyArgv(argvs.apiConfig)})`;
},
getSummary(argvs) {
return "问AI" + argvs.content.prompt;
},
updateArgvs(keyPath, newValue) {
const newArgvs = { ...this.argvs };
const keys = keyPath.split(".");
const lastKey = keys.pop();
const target = keys.reduce((obj, key) => obj[key], newArgvs);
target[lastKey] = newValue;
this.updateModelValue(newArgvs);
},
updateModelValue(argvs) {
this.$emit("update:modelValue", {
...this.modelValue,
summary: this.getSummary(argvs),
argvs,
code: this.generateCode(argvs),
});
},
},
mounted() {
const aiConfig = this.$root.profile.aiConfig || {};
console.log("aiConfig", aiConfig);
this.defaultArgvs.apiConfig = {
modelType: aiConfig.modelType || "openai",
apiUrl: newVarInputVal("str", aiConfig.apiUrl || ""),
apiToken: newVarInputVal("str", aiConfig.apiToken || ""),
model: newVarInputVal("str", aiConfig.model || ""),
};
const argvs = this.modelValue.argvs || this.defaultArgvs;
if (!this.modelValue.code) {
this.updateModelValue(argvs);
}
},
});
</script>
<style scoped>
.return-label {
display: flex;
align-items: center;
justify-content: center;
}
</style>
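For orientation, generateCode above serializes the editor state into a plain quickcommand.askAI(content, apiConfig) call. Assuming a literal prompt and an OpenAI profile, the emitted snippet would look roughly like the following; the exact quoting depends on stringifyArgv, so treat this as an approximation:

quickcommand.askAI(
  { prompt: "Summarize the following text ...", presetPrompt: "summarize" },
  {
    modelType: "openai",
    apiUrl: "https://api.openai.com/v1",
    apiToken: "your-api-token",
    model: "gpt-4o",
  }
);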

View File

@ -51,6 +51,14 @@
<PersonalizeMenu />
</q-item>
<!-- AI config -->
<q-item clickable v-close-popup @click="showAIConfig = true">
<q-item-section side>
<q-icon name="keyboard_arrow_left" />
</q-item-section>
<q-item-section>AI配置</q-item-section>
</q-item>
<!-- Favorites -->
<q-item
v-if="activatedQuickPanels.includes(currentTag)"
@ -99,6 +107,10 @@
<q-dialog v-model="showUserData">
<UserData :showInsertBtn="false" />
</q-dialog>
<q-dialog v-model="showAIConfig">
<AIConfig />
</q-dialog>
</div>
</template>
@ -110,6 +122,7 @@ import CommandManageMenu from "components/menu/CommandManageMenu.vue";
import UtilityFeaturesMenu from "components/menu/UtilityFeaturesMenu.vue";
import EnvConfigMenu from "components/menu/EnvConfigMenu.vue";
import PersonalizeMenu from "components/menu/PersonalizeMenu.vue";
import AIConfig from "components/popup/AIConfig.vue";
import UserData from "components/popup/UserData.vue";
import { utoolsFull } from "js/utools.js";
import { useCommandManager } from "js/commandManager";
@ -125,6 +138,7 @@ export default {
EnvConfigMenu,
PersonalizeMenu,
UserData,
AIConfig,
},
data() {
return {
@ -132,6 +146,7 @@ export default {
showAbout: false,
showPanelConf: false,
showUserData: false,
showAIConfig: false,
utools: utoolsFull,
};
},

View File

@ -0,0 +1,113 @@
<template>
<q-card style="width: 600px" class="q-pa-md">
<q-card-section class="text-h5"> API配置 </q-card-section>
<q-card-section>
<ButtonGroup
v-model="modelType"
:options="[
{ label: 'OPENAI', value: 'openai' },
{ label: 'OLLAMA', value: 'ollama' },
]"
/>
</q-card-section>
<q-card-section class="q-gutter-sm column">
<q-input outlined dense v-model="apiUrl">
<template v-slot:prepend>
<q-badge
color="primary"
text-color="black"
label="API地址"
class="q-pa-xs"
/>
</template>
</q-input>
<q-input outlined dense v-model="apiToken" v-if="modelType === 'openai'">
<template v-slot:prepend>
<q-badge
color="primary"
text-color="black"
label="API令牌"
class="q-pa-xs"
/>
</template>
</q-input>
<q-select
outlined
dense
v-model="model"
:options="models"
@focus="getModels"
>
<template v-slot:prepend>
<q-badge
color="primary"
text-color="black"
label="模型名称"
class="q-pa-xs"
/>
</template>
</q-select>
</q-card-section>
<q-card-section class="flex justify-end q-gutter-sm">
<q-btn flat color="grey" label="取消" v-close-popup />
<q-btn
flat
color="primary"
label="保存"
v-close-popup
@click="saveConfig"
/>
</q-card-section>
</q-card>
</template>
<script>
import { defineComponent } from "vue";
import ButtonGroup from "components/composer/common/ButtonGroup.vue";
export default defineComponent({
name: "AIConfig",
components: {
ButtonGroup,
},
data() {
return {
modelType: "openai",
apiUrl: "",
apiToken: "",
model: "",
models: [],
};
},
methods: {
async getModels() {
try {
const { success, result } = await window.getModelsFromAiApi({
modelType: this.modelType,
apiUrl: this.apiUrl,
apiToken: this.apiToken,
});
this.models = success ? result : [];
} catch (_) {
this.models = [];
}
},
saveConfig() {
this.$root.profile.aiConfig = {
modelType: this.modelType,
apiUrl: this.apiUrl,
apiToken: this.apiToken,
model: this.model,
};
console.log("saveConfig", this.$root.profile.aiConfig);
},
},
mounted() {
const aiConfig = this.$root.profile.aiConfig || {};
this.modelType = aiConfig.modelType || "openai";
this.apiUrl = aiConfig.apiUrl || "";
this.apiToken = aiConfig.apiToken || "";
this.model = aiConfig.model || "";
},
});
</script>
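saveConfig writes the dialog fields straight into this.$root.profile.aiConfig, which AskAIEditor's mounted() hook reads back to prefill its defaults. Assuming the profile is persisted as-is, the stored entry would look roughly like this (all values are placeholders):

profile.aiConfig = {
  modelType: "openai",               // or "ollama"
  apiUrl: "https://api.openai.com/v1",
  apiToken: "your-api-token",        // empty when using Ollama
  model: "gpt-4o",
};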

View File

@ -59,3 +59,8 @@ export const ReturnEditor = defineAsyncComponent(() =>
export const ScriptEditor = defineAsyncComponent(() =>
import("components/composer/script/ScriptEditor.vue")
);
// AI components
export const AskAIEditor = defineAsyncComponent(() =>
import("src/components/composer/ai/AskAIEditor.vue")
);

View File

@ -6,150 +6,11 @@ export const aiCommands = {
defaultOpened: false,
commands: [
{
value: "quickcomposer.ai.getModels",
label: "获取可用模型",
desc: "获取API支持的模型列表",
asyncMode: "await",
icon: "list",
config: [
{
label: "API配置",
component: "OptionEditor",
icon: "settings",
width: 12,
options: {
modelType: {
component: "ButtonGroup",
width: 12,
height: "26px",
options: [
{ label: "OpenAI", value: "openai" },
{ label: "Ollama", value: "ollama" },
],
},
apiUrl: {
label: "API地址",
component: "VariableInput",
icon: "link",
width: 12,
placeholder: "输入API地址",
},
apiToken: {
label: "API令牌",
component: "VariableInput",
icon: "key",
width: 12,
placeholder: "ollama 则留空",
},
},
defaultValue: {
modelType: "openai",
apiUrl: newVarInputVal("str", "https://api.openai.com/v1/models"),
},
},
],
outputs: {
label: "获取模型列表结果",
suggestName: "modelListResult",
structure: {
success: {
label: "是否成功",
suggestName: "isSuccess",
},
result: {
label: "模型列表",
suggestName: "modelList",
},
error: {
label: "错误信息",
suggestName: "resultErrorInfo",
},
},
},
},
{
value: "quickcomposer.ai.chat",
label: "AI对话",
desc: "与AI助手进行对话",
value: "quickcommand.askAI",
label: "AI问答",
asyncMode: "await",
icon: "chat",
config: [
{
label: "API配置",
component: "OptionEditor",
icon: "settings",
width: 12,
options: {
modelType: {
component: "ButtonGroup",
width: 12,
height: "26px",
options: [
{ label: "OpenAI", value: "openai" },
{ label: "Ollama", value: "ollama" },
],
},
apiUrl: {
label: "API地址",
component: "VariableInput",
icon: "link",
width: 12,
placeholder: "输入API地址",
},
apiToken: {
label: "API令牌",
component: "VariableInput",
icon: "key",
width: 6,
placeholder: "ollama 则留空",
},
model: {
label: "模型名称",
component: "VariableInput",
icon: "smart_toy",
width: 6,
placeholder: "如 gpt-3.5-turbo",
},
},
defaultValue: {
modelType: "openai",
apiUrl: newVarInputVal(
"str",
"https://api.openai.com/v1/chat/completions"
),
model: newVarInputVal("str", "gpt-3.5-turbo"),
},
},
{
label: "对话内容",
component: "OptionEditor",
icon: "chat",
width: 12,
options: {
presetPrompt: {
component: "ButtonGroup",
width: 12,
height: "26px",
options: [
{ label: "自由对话", value: "" },
{ label: "翻译", value: "translate" },
{ label: "总结", value: "summarize" },
{ label: "生成SHELL命令", value: "shell" },
],
},
prompt: {
label: "提示词",
component: "VariableInput",
icon: "edit",
width: 12,
placeholder: "输入要询问AI的内容",
},
},
defaultValue: {
presetPrompt: "",
},
},
],
component: "AskAIEditor",
outputs: {
label: "AI响应",
suggestName: "aiResponse",

View File

@ -888,6 +888,64 @@ interface quickcommandApi {
* ```
*/
closeLoadingBar(loadingBar?: { id: number; close: () => void }): void;
/**
 * Ask AI a question
 * @param content Question content (prompt plus optional preset prompt)
 * @param apiConfig API configuration
 * @example
 * // OpenAI example
 * const response = await quickcommand.askAI(
 *   {
 *     prompt: "Hello",
 *     presetPrompt: "" // use a preset prompt: translate/shell/summarize
 *   },
 *   {
 *     modelType: "openai",
 *     apiUrl: "https://api.openai.com/v1/chat/completions",
 *     apiToken: "your-api-token",
 *     model: "gpt-3.5-turbo"
 *   }
 * );
 *
 * // Ollama example
 * const response = await quickcommand.askAI(
 *   {
 *     prompt: "Find the process named chrome and kill it",
 *     presetPrompt: "shell"
 *   },
 *   {
 *     modelType: "ollama",
 *     apiUrl: "http://localhost:11434/api/generate",
 *     model: "qwen2.5:32b"
 *   }
 * );
 */
askAI(
  content: {
    /** Prompt text */
    prompt: string;
    /** Preset prompt type */
    presetPrompt?: "" | "translate" | "shell" | "summarize";
  },
  apiConfig: {
    /** Model type: openai or ollama */
    modelType: "openai" | "ollama";
    /** API URL */
    apiUrl: string;
    /** API token (required for OpenAI only) */
    apiToken?: string;
    /** Model name */
    model: string;
  }
): Promise<{
  /** Whether the request succeeded */
  success: boolean;
  /** AI response content */
  result?: string;
  /** Error message */
  error?: string;
}>;
}
declare var quickcommand: quickcommandApi;
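Since the declared return type carries success, result, and error, failures appear to surface through the error field rather than a rejected promise (chat() wraps its body in try/catch), so callers are expected to branch on success. A minimal consumption sketch:

const res = await quickcommand.askAI(
  { prompt: "Translate this sentence into Chinese", presetPrompt: "translate" },
  {
    modelType: "ollama",
    apiUrl: "http://localhost:11434/api/generate",
    model: "qwen2.5:32b",
  }
);
if (res.success) {
  console.log(res.result); // the AI's reply
} else {
  console.error(res.error); // error message from the failed request
}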

View File

@ -2512,100 +2512,4 @@ interface quickcomposerApi {
*/
waitForElement(selector: string, timeout?: number): Promise<void>;
};
/**
 * AI operations
 */
ai: {
  /**
   * AI chat
   * @param apiConfig API configuration
   * @param content Chat content
   * @example
   * // OpenAI example
   * const response = await quickcomposer.ai.chat(
   *   {
   *     modelType: "openai",
   *     apiUrl: "https://api.openai.com/v1/chat/completions",
   *     apiToken: "your-api-token",
   *     model: "gpt-3.5-turbo"
   *   },
   *   {
   *     prompt: "Hello",
   *     presetPrompt: "" // use a preset prompt: translate/shell/summarize
   *   }
   * );
   *
   * // Ollama example
   * const response = await quickcomposer.ai.chat(
   *   {
   *     modelType: "ollama",
   *     apiUrl: "http://localhost:11434/api/generate",
   *     model: "qwen2.5:32b"
   *   },
   *   {
   *     prompt: "Find the process named chrome and kill it",
   *     presetPrompt: "shell"
   *   }
   * );
   */
  chat(
    apiConfig: {
      /** Model type: openai or ollama */
      modelType: "openai" | "ollama";
      /** API URL */
      apiUrl: string;
      /** API token (required for OpenAI only) */
      apiToken?: string;
      /** Model name */
      model: string;
    },
    content: {
      /** Prompt text */
      prompt: string;
      /** Preset prompt type */
      presetPrompt?: "" | "translate" | "shell" | "summarize";
    }
  ): Promise<{
    /** Whether the request succeeded */
    success: boolean;
    /** AI response content */
    result?: string;
    /** Error message */
    error?: string;
  }>;
  /**
   * Get the list of models supported by the API
   * @param apiConfig API configuration
   * @example
   * // OpenAI example
   * const models = await quickcomposer.ai.getModels({
   *   modelType: "openai",
   *   apiUrl: "https://api.openai.com/v1/models",
   *   apiToken: "your-api-token"
   * });
   *
   * // Ollama example
   * const models = await quickcomposer.ai.getModels({
   *   modelType: "ollama",
   *   apiUrl: "http://localhost:11434"
   * });
   */
  getModels(apiConfig: {
    /** Model type: openai or ollama */
    modelType: "openai" | "ollama";
    /** API URL */
    apiUrl: string;
    /** API token (required for OpenAI only) */
    apiToken?: string;
  }): Promise<{
    /** Whether the request succeeded */
    success: boolean;
    /** List of model names */
    result?: string[];
    /** Error message */
    error?: string;
  }>;
};
}