refactor llm config
This commit is contained in:
@@ -2,13 +2,6 @@ meta:
  version: "1.0.9"

llm:
  # 目前默认用的是阿里的千问大模型 api。
  # 如果你想用别家的,需要对应修改下面的base_url为对应的模型。
  # 填入对应的密钥,并且修改model_name和fast_model_name为对应的模型。
  key: "你的密钥"
  base_url: "https://dashscope.aliyuncs.com/compatible-mode/v1"
  model_name: "openai/qwen-plus"  # 聪明的模型,负责难的任务
  fast_model_name: "openai/qwen-flash"  # 快速的模型,负责简单的任务
  default_modes:
    action_decision: "normal"
    long_term_objective: "normal"
@@ -17,7 +10,6 @@ llm:
    relation_resolver: "fast"
    story_teller: "fast"
    interaction_feedback: "fast"
  mode: "default"  # default: 使用default_modes中的模式,normal: 均使用normal模式,fast: 均使用fast模式

paths:
  templates: static/templates/
Reference in New Issue
Block a user