style: auto-format code (eslint --fix)
Formatting-only changes from ESLint auto-fix; no logic changes. Made-with: Cursor
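The lint setup itself is not part of this diff. As a sketch only, a flat config that produces this kind of reflow would run Prettier as an ESLint rule with a wide print width, so that eslint --fix collapses short multi-line constructs and breaks long single lines apart. The plugin choice and every option below are assumptions inferred from the diff (no semicolons, single quotes, and a line budget that fits the collapsed buildPiModel signature but not the three-argument completeSimple calls):

// eslint.config.mjs: hypothetical sketch, not ChatLab's actual config.
import tsParser from '@typescript-eslint/parser'
import prettier from 'eslint-plugin-prettier'

export default [
  {
    files: ['**/*.ts'],
    // TypeScript sources need a TS-aware parser before any rule can run.
    languageOptions: { parser: tsParser },
    plugins: { prettier },
    rules: {
      // Reported as an error, so eslint --fix rewrites the formatting.
      // printWidth 120 is an inference: the collapsed one-line signatures in
      // this diff sit just under 120 columns, the reflowed calls just over.
      'prettier/prettier': ['error', { printWidth: 120, semi: false, singleQuote: true }],
    },
  },
]

With a config along these lines, npx eslint --fix src/ yields exactly this sort of whitespace-only diff.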
@@ -7,20 +7,12 @@ import * as fs from 'fs'
 import * as path from 'path'
 import { randomUUID } from 'crypto'
 import { getAiDataDir } from '../../paths'
-import type {
-  LLMProvider,
-  ProviderInfo,
-  AIServiceConfig,
-  AIConfigStore,
-} from './types'
+import type { LLMProvider, ProviderInfo, AIServiceConfig, AIConfigStore } from './types'
 import { MAX_CONFIG_COUNT } from './types'
 import { aiLogger } from '../logger'
 import { encryptApiKey, decryptApiKey, isEncrypted } from './crypto'
 import { t } from '../../i18n'
-import {
-  completeSimple,
-  type Model as PiModel,
-} from '@mariozechner/pi-ai'
+import { completeSimple, type Model as PiModel } from '@mariozechner/pi-ai'

 // Export types
 export * from './types'
@@ -456,9 +448,7 @@ export function getProviderInfo(provider: LLMProvider): ProviderInfo | null {
 /**
  * Convert an AIServiceConfig into a pi-ai Model object
  */
-export function buildPiModel(
-  config: AIServiceConfig
-): PiModel<'openai-completions'> | PiModel<'google-generative-ai'> {
+export function buildPiModel(config: AIServiceConfig): PiModel<'openai-completions'> | PiModel<'google-generative-ai'> {
   const providerInfo = getProviderInfo(config.provider)
   const baseUrl = config.baseUrl || providerInfo?.defaultBaseUrl || ''
   const modelId = config.model || providerInfo?.models?.[0]?.id || ''
@@ -523,13 +513,17 @@ export async function validateApiKey(
   const timeout = setTimeout(() => abortController.abort(), 15000)

   try {
-    await completeSimple(piModel, {
-      messages: [{ role: 'user', content: 'Hi', timestamp: Date.now() }],
-    }, {
-      apiKey,
-      maxTokens: 1,
-      signal: abortController.signal,
-    })
+    await completeSimple(
+      piModel,
+      {
+        messages: [{ role: 'user', content: 'Hi', timestamp: Date.now() }],
+      },
+      {
+        apiKey,
+        maxTokens: 1,
+        signal: abortController.signal,
+      }
+    )
     return { success: true }
   } finally {
     clearTimeout(timeout)
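Beyond the reflow, this hunk shows how validateApiKey bounds its probe request: an AbortController is armed with a 15-second timer, and the timer is always cleared in finally. A minimal standalone sketch of the same pattern follows; the withTimeout helper and its signature are hypothetical, not part of the ChatLab code:

// Hypothetical helper distilling the abort-on-timeout pattern above.
async function withTimeout<T>(run: (signal: AbortSignal) => Promise<T>, ms = 15000): Promise<T> {
  const abortController = new AbortController()
  // Abort the in-flight request once the deadline passes.
  const timeout = setTimeout(() => abortController.abort(), ms)
  try {
    return await run(abortController.signal)
  } finally {
    // Clear the timer whether the call resolved, threw, or was aborted.
    clearTimeout(timeout)
  }
}

validateApiKey in the hunk above inlines this logic around its completeSimple call rather than extracting a helper.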
@@ -40,15 +40,19 @@ async function rewriteQuery(query: string, abortSignal?: AbortSignal): Promise<s
   const piModel = buildPiModel(activeConfig)
   const prompt = QUERY_REWRITE_PROMPT.replace('{query}', query)

-  const result = await completeSimple(piModel, {
-    systemPrompt: '你是一个查询优化专家,专门将用户问题改写为更适合语义检索的形式。',
-    messages: [{ role: 'user', content: prompt, timestamp: Date.now() }],
-  }, {
-    apiKey: activeConfig.apiKey,
-    temperature: 0.3,
-    maxTokens: 200,
-    signal: abortSignal,
-  })
+  const result = await completeSimple(
+    piModel,
+    {
+      systemPrompt: '你是一个查询优化专家,专门将用户问题改写为更适合语义检索的形式。',
+      messages: [{ role: 'user', content: prompt, timestamp: Date.now() }],
+    },
+    {
+      apiKey: activeConfig.apiKey,
+      temperature: 0.3,
+      maxTokens: 200,
+      signal: abortSignal,
+    }
+  )

   const rewritten = result.content
     .filter((item): item is PiTextContent => item.type === 'text')
@@ -28,14 +28,18 @@ async function llmComplete(
   const piModel = buildPiModel(activeConfig)
   const now = Date.now()

-  const result = await completeSimple(piModel, {
-    systemPrompt,
-    messages: [{ role: 'user', content: userPrompt, timestamp: now }],
-  }, {
-    apiKey: activeConfig.apiKey,
-    temperature: options?.temperature,
-    maxTokens: options?.maxTokens,
-  })
+  const result = await completeSimple(
+    piModel,
+    {
+      systemPrompt,
+      messages: [{ role: 'user', content: userPrompt, timestamp: now }],
+    },
+    {
+      apiKey: activeConfig.apiKey,
+      temperature: options?.temperature,
+      maxTokens: options?.maxTokens,
+    }
+  )

   return result.content
     .filter((item): item is PiTextContent => item.type === 'text')
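Every hunk in this commit reflows a call to the same completeSimple helper from @mariozechner/pi-ai. The shapes below are reconstructed from these call sites alone, not from the library's published typings, so treat them as assumptions:

// Inferred from the call sites in this diff; not the library's actual types.
interface CompleteSimpleContext {
  systemPrompt?: string
  messages: Array<{ role: 'user'; content: string; timestamp: number }>
}

interface CompleteSimpleOptions {
  apiKey?: string
  temperature?: number
  maxTokens?: number
  signal?: AbortSignal
}

// Three positional arguments: this is what overflows the print width and
// forces the multi-line call layout seen throughout the diff.
declare function completeSimple(
  model: unknown, // PiModel<...> in the real code
  context: CompleteSimpleContext,
  options: CompleteSimpleOptions
): Promise<{ content: Array<{ type: string; text?: string }> }>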
@@ -438,11 +442,10 @@ export async function generateSessionSummary(
  * Generate the summary directly (for short sessions)
  */
 async function generateDirectSummary(content: string, lengthLimit: number, locale: string): Promise<string> {
-  const result = await llmComplete(
-    t('summary.systemPromptDirect'),
-    buildSummaryPrompt(content, lengthLimit, locale),
-    { temperature: 0.3, maxTokens: 300 }
-  )
+  const result = await llmComplete(t('summary.systemPromptDirect'), buildSummaryPrompt(content, lengthLimit, locale), {
+    temperature: 0.3,
+    maxTokens: 300,
+  })
   return result.trim()
 }

@@ -462,11 +465,10 @@ async function generateMapReduceSummary(

   for (let i = 0; i < segments.length; i++) {
     const segmentContent = formatMessages(segments[i])
-    const result = await llmComplete(
-      t('summary.systemPromptDirect'),
-      buildSubSummaryPrompt(segmentContent, locale),
-      { temperature: 0.3, maxTokens: 100 }
-    )
+    const result = await llmComplete(t('summary.systemPromptDirect'), buildSubSummaryPrompt(segmentContent, locale), {
+      temperature: 0.3,
+      maxTokens: 100,
+    })
     subSummaries.push(result.trim())
   }
