Browse Source

change ai model

master
godo 6 months ago
parent
commit
2b4a894e54
  1. 10
      frontend/README.md
  2. 214
      frontend/src/components/ai/DownAddbox.vue
  3. 17
      frontend/src/components/ai/DownLabeleditor.vue
  4. 27
      frontend/src/components/ai/aisetting.vue
  5. 2
      frontend/src/i18n/lang/zh.json
  6. 122
      frontend/src/stores/model.ts
  7. 149
      frontend/src/stores/modelconfig.ts
  8. 2
      godo/model/types.go

10
frontend/README.md

@ -1 +1,9 @@
## godoos
## godoos
## 什么是模型标签:
模型标签是godoos框架的一个核心概念,godoos框架中的模型标签用于描述一个模型的归类,比如这个模型属于哪个公司,模型的大类名称,模型的分类等等。
## 什么是模型引擎:
godoos框架中提供了两种模型引擎:本地引擎和网络引擎。本地引擎用于本地模型调用,网络引擎用于网络模型调用。

214
frontend/src/components/ai/DownAddbox.vue

@ -1,41 +1,64 @@
<script setup lang="ts">
import { useModelStore } from "@/stores/model";
import { notifyError } from "@/util/msg";
import { ref, toRaw } from "vue";
import { ref, toRaw, computed } from "vue";
import { t } from "@/i18n/index";
const modelStore = useModelStore();
const fromSource = [
{
label: "ollama",
value: "ollama",
},
{
label: t("model.local"),
value: "local",
},
{
label: t("model.network"),
value: "network",
},
];
const formInit = {
from: "ollama",
file_name: "",
model: "",
labelId: "",
url: "",
ip: "",
pb:"",
context_length: "",
engine: "ollama",
template: "",
parameters: "",
quant: "q4_K_M",
info: {},
info: {
url: "",
from: "ollama",
file_name: "",
context_length: "",
engine: "ollama",
template: "",
parameters: "",
quant: "q4_K_M",
pb: "",
},
type: "",
};
const formData = ref(formInit);
// Options for the "download source" selector, derived from the chosen engine.
// Only the ollama engine can pull directly from ollama.com; every engine
// supports the network and local (LAN) sources.
const fromSource = computed(() => {
    const common = [
        { label: t("model.network"), value: "network" },
        { label: t("model.local"), value: "local" },
    ];
    if (formData.value.info.engine === "ollama") {
        return [{ label: "ollama.com", value: "ollama" }, ...common];
    }
    return common;
});
// Sync the download source with the newly selected engine: the ollama engine
// defaults to pulling from ollama.com, everything else to a network source.
function setFrom(val: string) {
    formData.value.info.from = val === "ollama" ? "ollama" : "network";
}
const emit = defineEmits(["closeFn", "saveFn"]);
const localModels: any = ref([]);
async function getLocalModel() {
@ -72,16 +95,17 @@ function setLocalInfo() {
modelData = toRaw(modelData);
const urls: any = [];
const url = `http://${formData.value.ip}:56780/ai/server?path=`;
modelData.paths.forEach((item: any) => {
modelData.info.path.forEach((item: any) => {
urls.push(url + item);
});
formData.value.url = urls;
//formData.value.info.url = urls;
modelData.info.url = urls;
formData.value.info = modelData.info;
formData.value.file_name = modelData.file_name;
formData.value.engine = modelData.engine;
if (modelData.engine == "ollama") {
formData.value.type = "local";
}
//formData.value.file_name = modelData.file_name;
//formData.value.engine = modelData.engine;
// if (modelData.engine == "ollama") {
// formData.value.type = "local";
// }
}
async function download() {
const saveData: any = toRaw(formData.value);
@ -134,7 +158,7 @@ async function download() {
num_predict: 1,
top_k: 40,
temperature: 0.7,
};
if (saveData.parameters != "" && typeof saveData.parameters === "string") {
saveData.parameters = saveData.parameters.split("\n");
@ -146,7 +170,7 @@ async function download() {
context_length: saveData.context_length,
template: saveData.template,
parameters: saveData.parameters,
pb:saveData.pb.toUpperCase(),
pb: saveData.pb.toUpperCase(),
};
const lowerName = saveData.info.pb.replace("B", "") * 1;
if (lowerName < 3) {
@ -156,7 +180,7 @@ async function download() {
else if (lowerName < 9) {
saveData.info.cpu = "16GB";
saveData.info.gpu = "8GB";
}else{
} else {
saveData.info.cpu = "32GB";
saveData.info.gpu = "12GB";
}
@ -171,113 +195,67 @@ async function download() {
</script>
<template>
<el-form ref="form" :model="formData" label-width="150px" style="margin-top: 15px">
<el-form-item :label="t('model.selectSource')">
<el-select v-model="formData.from">
<el-option
v-for="(item, key) in fromSource"
:key="key"
:label="item.label"
:value="item.value"
/>
<el-form-item :label="t('model.selectLabel')">
<el-select v-model="formData.labelId">
<el-option v-for="(item, key) in modelStore.labelList" :key="key" :label="item.name" :value="item.id" />
</el-select>
</el-form-item>
<el-form-item :label="t('model.modelName')" v-if="formData.from !== 'local'">
<el-input
v-model="formData.model"
prefix-icon="House"
clearable
:placeholder="t('model.enterModelName')"
></el-input>
<el-form-item :label="t('model.selectEngine')">
<el-select v-model="formData.info.engine" :placeholder="t('model.selectEngine')" @change="setFrom">
<el-option v-for="item, key in modelStore.modelEngines" :key="key" :label="item.name" :value="item.name" />
</el-select>
</el-form-item>
<el-form-item :label="t('model.selectModel')">
<el-select v-model="formData.labelId">
<el-option
v-for="(item, key) in modelStore.labelList"
:key="key"
:label="item.name"
:value="item.id"
/>
<el-form-item :label="t('model.selectSource')">
<el-select v-model="formData.info.from">
<el-option v-for="(item, key) in fromSource" :key="key" :label="item.label" :value="item.value" />
</el-select>
</el-form-item>
<template v-if="formData.from === 'local'">
<el-form-item :label="t('model.modelName')" v-if="formData.info.from !== 'local'">
<el-input v-model="formData.model" prefix-icon="House" clearable
:placeholder="t('model.enterModelName')"></el-input>
</el-form-item>
<template v-if="formData.info.from === 'local'">
<el-form-item :label="t('model.oppositeIpAddress')">
<el-input
v-model="formData.ip"
prefix-icon="Key"
clearable
placeholder="192.168.1.66"
@blur="getLocalModel"
></el-input>
<el-input v-model="formData.ip" prefix-icon="Key" clearable placeholder="192.168.1.66"
@blur="getLocalModel"></el-input>
</el-form-item>
<el-form-item :label="t('model.selectModel')" v-if="localModels.length > 0">
<el-select v-model="formData.model" @change="setLocalInfo">
<el-option
v-for="(item, key) in localModels"
:key="key"
:label="item.model"
:value="item.model"
/>
<el-option v-for="(item, key) in localModels" :key="key" :label="item.model" :value="item.model" />
</el-select>
</el-form-item>
</template>
<template v-if="formData.from === 'network'">
<template v-if="formData.info.from === 'network'">
<el-form-item :label="t('model.modelUrl')">
<el-input
type="textarea"
:row="3"
v-model="formData.url"
:placeholder="t('model.enterModelUrl')"
></el-input>
<el-input type="textarea" :row="3" v-model="formData.info.url"
:placeholder="t('model.enterModelUrl')"></el-input>
</el-form-item>
<el-form-item :label="t('model.selectEngine')">
<el-select v-model="formData.engine">
<el-option
v-for="(item, key) in modelStore.modelEngines"
:key="key"
:label="item.name"
:value="item.name"
/>
<!-- <el-form-item :label="t('model.selectEngine')">
<el-select v-model="formData.info.engine">
<el-option v-for="(item, key) in modelStore.modelEngines" :key="key" :label="item.name" :value="item.name" />
</el-select>
</el-form-item>
<template v-if="formData.engine === 'ollama'">
</el-form-item> -->
<template v-if="formData.info.engine === 'ollama' && formData.info.from === 'network'">
<el-form-item :label="t('model.template')">
<el-input type="textarea" :row="3" v-model="formData.template"></el-input>
<el-input type="textarea" :row="3" v-model="formData.info.template"></el-input>
</el-form-item>
<el-form-item :label="t('model.contextLength')">
<el-input
type="number"
v-model="formData.context_length"
prefix-icon="Key"
clearable
:placeholder="t('model.enterContextLength')"
></el-input>
<el-input type="number" v-model="formData.info.context_length" prefix-icon="Key" clearable
:placeholder="t('model.enterContextLength')"></el-input>
</el-form-item>
<el-form-item :label="t('model.parameterSettings')">
<el-input
type="textarea"
:row="3"
:placeholder="t('model.onePerLine')"
v-model="formData.parameters"
></el-input>
<el-input type="textarea" :row="3" :placeholder="t('model.onePerLine')"
v-model="formData.info.parameters"></el-input>
</el-form-item>
<el-form-item :label="t('model.parameterSize')">
<el-input
type="number"
v-model="formData.pb"
prefix-icon="Key"
clearable
:placeholder="t('model.enterParameterSize')"
></el-input>
<el-input type="number" v-model="formData.info.pb" prefix-icon="Key" clearable
:placeholder="t('model.enterParameterSize')"></el-input>
</el-form-item>
<el-form-item :label="t('model.selectQuantization')">
<el-select v-model="formData.quant">
<el-option
v-for="(item, key) in modelStore.llamaQuant"
:key="key"
:label="item"
:value="item"
/>
<el-select v-model="formData.info.quant">
<el-option v-for="(item, key) in modelStore.llamaQuant" :key="key" :label="item" :value="item" />
</el-select>
</el-form-item>
</template>

17
frontend/src/components/ai/DownLabeleditor.vue

@ -17,7 +17,6 @@ const labelData:any = ref({
zhdesc : "",
endesc : "",
family: "",
engine:"llm",
action:[],
})
async function save() {
@ -26,10 +25,6 @@ async function save() {
notifyError(t('common.inputTitle'))
return;
}
if(saveData.type == "") {
notifyError(t('model.selectEngine'))
return;
}
if(saveData.action.length == 0){
notifyError(t('model.selectCategory'))
return;
@ -54,7 +49,6 @@ watchEffect(async () => {
zhdesc : "",
endesc : "",
family: "",
engine:"llm",
action:[],
}
}
@ -91,16 +85,7 @@ watchEffect(async () => {
/>
</el-select>
</el-form-item>
<el-form-item :label="t('model.engine')">
<el-select v-model="labelData.engine" :placeholder="t('model.selectEngine')">
<el-option
v-for="item,key in modelStore.modelEngines"
:key="key"
:label="item.name"
:value="item.name"
/>
</el-select>
</el-form-item>
<el-form-item :label="t('model.chineseDescription')">
<el-input

27
frontend/src/components/ai/aisetting.vue

@ -1,5 +1,5 @@
<script lang="ts" setup>
import { onMounted, ref } from "vue";
import { onMounted, ref,watch } from "vue";
import {
getSystemConfig,
setSystemConfig,
@ -40,7 +40,6 @@ const hoverTxt = {
const config: any = ref({});
//const chatConfig: any = ref({});
const currentsModel: any = ref({});
const pageLoading = ref(true);
import type { TabsPaneContext } from "element-plus";
const activeName = ref("system");
@ -91,8 +90,6 @@ const saveConfig = async () => {
}
await changeConfig();
//await modelStore.getModelList();
//modelStore.updateCurrentModels(modelList.value);
notifySuccess(t('common.saveSuccess'));
};
@ -107,14 +104,10 @@ const initConfig = async () => {
//currentsModel.value = getCurrents();
//chatConfig.value = getChatConfig();
};
onMounted(async () => {
await initConfig();
await modelStore.getModelList();
//modelList.value = modelStore.modelList;
//console.log(modelList.value)
pageLoading.value = false;
modelStore.cateList.forEach((item: any) => {
const currentModel = modelStore.modelList.find((el: any) => el.category === item && el.isdef === 1);
// currentsModel
function updateCurrentsModel() {
modelStore.cateList.forEach((item:any) => {
const currentModel = modelStore.modelList.find((el:any) => el.action === item && el.isdef === 1);
if (currentModel) {
currentsModel.value[item] = currentModel.model;
} else {
@ -122,8 +115,16 @@ onMounted(async () => {
currentsModel.value[item] = firstModel ? firstModel.model : '';
}
});
}
onMounted(async () => {
await initConfig();
await modelStore.getModelList();
updateCurrentsModel();
});
watch(modelStore.modelList, () => {
updateCurrentsModel();
});
async function changeDir() {
const path: any = await OpenDirDialog();
//console.log(path)
@ -131,7 +132,7 @@ async function changeDir() {
}
</script>
<template>
<div v-loading="pageLoading">
<div>
<el-tabs v-model="activeName" class="setting-tabs" style="margin: 12px" @tab-click="handleClick">
<el-tab-pane :label="t('aisetting.modelSetting')" name="system">
<el-scrollbar class="scrollbarSettingHeight">

2
frontend/src/i18n/lang/zh.json

@ -289,7 +289,7 @@
"chineseDescription": "中文描述",
"englishDescription": "英文描述",
"labelNameEmpty": "名称不能为空",
"local": "本地",
"local": "内网",
"network": "网络",
"invalidModel": "请输入正确的模型!",
"invalidContextLength": "请输入正确的上下文长度!",

122
frontend/src/stores/model.ts

@ -3,94 +3,13 @@ import { ref } from "vue";
import { db } from "./db.ts"
import { aiLabels } from "./labels/index.ts"
import { fetchGet, getSystemKey } from "@/system/config"
const modelEngines = [
{
name: "ollama",
cpp: "llama.cpp",
needQuant: true
},
{
name: "sd",
cpp: "stable-diffusion.cpp",
needQuant: false
},
{
name: "voice",
cpp: "sherpa.cpp",
needQuant: false
}
]
const llamaQuant = [
"q2_K",
"q3_K",
"q3_K_S",
"q3_K_M",
"q3_K_L",
"q4_0",
"q4_1",
"q4_K",
"q4_K_S",
"q4_K_M",
"q5_0",
"q5_1",
"q5_K",
"q5_K_S",
"q5_K_M",
"q6_K",
"q8_0",
"f16",
]
import { cateList, modelEngines, netEngines, llamaQuant, chatInitConfig } from "./modelconfig"
export const useModelStore = defineStore('modelStore', () => {
const labelList: any = ref([])
const cateList: any = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings"]
const modelList: any = ref([])
const downList: any = ref([])
const chatConfig: any = ref({
chat: {
key: "chat",
contextLength: 10,
num_keep: 5, //保留多少个最有可能的预测结果。这与top_k一起使用,决定模型在生成下一个词时考虑的词汇范围。
num_predict: 3, //生成多少个预测结果
top_p: 0.95,
top_k: 40, //影响生成的随机性。较高的top_k值将使模型考虑更多的词汇
temperature: 0.7, //影响生成的随机性。较低的温度产生更保守的输出,较高的温度产生更随机的输出。
},
translation: {
key: "translation",
num_keep: 5,
num_predict: 1,
top_k: 40,
top_p: 0.95,
temperature: 0.2,
},
creation: {
key: "creation",
num_keep: 3,
num_predict: 1,
top_k: 40,
top_p: 0.95,
temperature: 0.2,
},
knowledge: {
key: "knowledge",
contextLength: 10,
num_keep: 5,
num_predict: 1,
top_k: 40,
top_p: 0.95,
temperature: 0.2,
},
spoken: {
key: "spoken",
contextLength: 10,
num_keep: 5,
num_predict: 1,
top_k: 40,
top_p: 0.95,
temperature: 0.2,
}
})
const chatConfig: any = ref(chatInitConfig)
const aiUrl = getSystemKey("aiUrl")
async function getLabelCate(cateName: string) {
@ -145,26 +64,26 @@ export const useModelStore = defineStore('modelStore', () => {
const data = await res.json();
// console.log(data);
if (data && data.length > 0) {
// 获取当前modelList中的模型名称
const existingModels:any = [];
const has = await db.getAll("modelslist");
has.forEach((model: any) => {
if(model.isdef && model.isdef > 0) {
existingModels.push(model.model)
}
})
data.forEach((d:any) => {
if (existingModels.includes(d.model)) {
d.isdef = 1
}
});
await db.clear("modelslist");
await db.addAll("modelslist", data);
modelList.value = data;
// 获取当前modelList中的模型名称
const existingModels: any = [];
const has = await db.getAll("modelslist");
has.forEach((model: any) => {
if (model.isdef && model.isdef > 0) {
existingModels.push(model.model)
}
})
data.forEach((d: any) => {
if (existingModels.includes(d.model)) {
d.isdef = 1
}
});
await db.clear("modelslist");
await db.addAll("modelslist", data);
modelList.value = data;
}
// 重新获取所有模型列表
}
}
async function refreshOllama() {
const res = await fetchGet(`${aiUrl}/ai/refreshOllama`)
//console.log(res)
@ -340,6 +259,7 @@ export const useModelStore = defineStore('modelStore', () => {
modelList,
downList,
modelEngines,
netEngines,
llamaQuant,
chatConfig,
getList,

149
frontend/src/stores/modelconfig.ts

@ -0,0 +1,149 @@
// Model categories (actions) recognized across the app; used to group models
// and to pick a per-category default model. Typed as string[] instead of any
// so indexing and iteration stay type-checked.
export const cateList: string[] = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings"]
// Local inference engines. `cpp` names the backing runtime project and
// `needQuant` marks engines whose models require choosing a quantization level.
const localEngineRows: Array<[name: string, cpp: string, needQuant: boolean]> = [
    ["ollama", "ollama", true],
    ["llama", "llama.cpp", true],
    ["cortex", "cortex.cpp", true],
    ["llamafile", "llamafile", false],
    ["sd", "stable-diffusion.cpp", false],
    ["voice", "sherpa.cpp", false],
];
export const modelEngines = localEngineRows.map(
    ([name, cpp, needQuant]) => ({ name, cpp, needQuant })
);
// Hosted (network) AI providers: `name` is the display name shown in the UI,
// `cpp` is the internal provider/service identifier.
const netEngineRows: Array<[name: string, cpp: string]> = [
    ["OpenAI", "ChatGPT"],
    ["Google", "gemini"],
    ["giteeAI", "giteeAI"],
    ["Baidu", "baidu"],
    ["Alibaba", "ali"],
    ["Tencent", "tencent"],
    ["Kimi", "Moonshot"],
    ["BigModel", "BigModel"],
    ["xAI", "xAI"],
    ["Stability", "stability"],
    ["Anthropic", "claude"],
    ["Groq", "groqcloud"],
];
export const netEngines = netEngineRows.map(([name, cpp]) => ({ name, cpp }));
// Quantization levels offered for llama.cpp-style engines, ordered roughly
// from most compressed (q2) up to full 16-bit floats (f16).
export const llamaQuant = (
    "q2_K q3_K q3_K_S q3_K_M q3_K_L " +
    "q4_0 q4_1 q4_K q4_K_S q4_K_M " +
    "q5_0 q5_1 q5_K q5_K_S q5_K_M " +
    "q6_K q8_0 f16"
).split(" ");
// Default generation settings per chat scenario.
// num_keep: how many top candidate predictions to retain (interacts with top_k).
// num_predict: how many completions to generate.
// top_k / top_p: size of the sampling pool; larger values increase randomness.
// temperature: lower values give more conservative output, higher more random.
const conservative = {
    num_keep: 5,
    num_predict: 1,
    top_k: 40,
    top_p: 0.95,
    temperature: 0.2,
};
export const chatInitConfig = {
    // Free chat keeps more context and samples with a higher temperature.
    chat: {
        key: "chat",
        contextLength: 10,
        num_keep: 5,
        num_predict: 3,
        top_p: 0.95,
        top_k: 40,
        temperature: 0.7,
    },
    translation: { key: "translation", ...conservative },
    creation: { key: "creation", ...conservative, num_keep: 3 },
    knowledge: { key: "knowledge", contextLength: 10, ...conservative },
    spoken: { key: "spoken", contextLength: 10, ...conservative },
}

2
godo/model/types.go

@ -80,7 +80,7 @@ type ReqBody struct {
//Options ModelConfig `json:"options"`
Model string `json:"model"`
Action string `json:"action"`
Type string `json:"type"`
Type string `json:"type"` //local or net
Label string `json:"label"`
Info ModelInfo `json:"info"`
Status string `json:"status"`

Loading…
Cancel
Save