
change ai

Branch: master
Committed by godo, 6 months ago
Commit: 601f9c09ef
25 changed files:

  1. frontend/src/components/ai/DownAddbox.vue (66)
  2. frontend/src/components/ai/DownLabeleditor.vue (46)
  3. frontend/src/components/ai/DownModelInfo.vue (4)
  4. frontend/src/components/ai/aimodel.vue (23)
  5. frontend/src/components/ai/aisetting.vue (16)
  6. frontend/src/components/localchat/ChatDomain.vue (4)
  7. frontend/src/components/localchat/ChatNav.vue (15)
  8. frontend/src/hook/useAi.ts (1)
  9. frontend/src/i18n/lang/en.json (1)
 10. frontend/src/i18n/lang/zh.json (10)
 11. frontend/src/stores/labels/bgereranker.ts (25)
 12. frontend/src/stores/labels/index.ts (2)
 13. frontend/src/stores/labels/qwen.ts (96)
 14. frontend/src/stores/localchat.ts (5)
 15. frontend/src/stores/model.ts (17)
 16. frontend/src/stores/modelconfig.ts (34)
 17. frontend/src/system/config.ts (20)
 18. godo/model/api/gitee.go (23)
 19. godo/model/api/openai.go (30)
 20. godo/model/convert.go (42)
 21. godo/model/down.go (2)
 22. godo/model/libs.go (36)
 23. godo/model/ollama.go (108)
 24. godo/model/server.go (61)
 25. godo/sys/setting.go (18)

66
frontend/src/components/ai/DownAddbox.vue

@@ -6,10 +6,10 @@ import { t } from "@/i18n/index";
const modelStore = useModelStore();
const formInit = {
-  model: "",
  labelId: "",
  ip: "",
  info: {
+    model: "",
    url: "",
    from: "ollama",
    file_name: "",
@@ -86,7 +86,7 @@ async function getLocalModel() {
}
function setLocalInfo() {
  let modelData: any = localModels.value.find((item: any) => {
-    return item.model === formData.value.model;
+    return item.model === formData.value.info.model;
  });
  if (!modelData) {
    notifyError(t('model.invalidModel'));
@@ -116,42 +116,46 @@ async function download() {
    return;
  }
-  if (saveData.from == "ollama") {
-    if (saveData.model == "") {
+  if (saveData.info.from == "ollama") {
+    if (saveData.info.model == "") {
      notifyError(t('model.labelNameEmpty'));
      return;
    }
-    if (saveData.model.indexOf(":") === -1) {
-      saveData.model = saveData.model + ":latest";
+    if (saveData.info.model.indexOf(":") === -1) {
+      saveData.info.model = saveData.info.model + ":latest";
    }
+    if (saveData.info.url == "") {
+      saveData.info.url = []
+    }
+    saveData.info.context_length = 1024
  }
-  if (saveData.from == "local") {
-    if (!saveData.url || saveData.url.length == 0) {
+  if (saveData.info.from == "local") {
+    if (!saveData.info.url || saveData.info.url.length == 0) {
      notifyError(t('model.invalidModel'));
      return;
    }
  }
-  if (saveData.from == "network") {
-    if (isNaN(saveData.context_length) || saveData.context_length < 1) {
+  if (saveData.info.from == "network") {
+    if (isNaN(saveData.context_length) || saveData.info.context_length < 1) {
      notifyError(t('model.invalidContextLength'));
      return;
    }
-    saveData.context_length = saveData.context_length * 1;
-    if (saveData.url == "") {
+    saveData.info.context_length = saveData.info.context_length * 1;
+    if (saveData.info.url == "") {
      notifyError(t('model.invalidModelUrl'));
      return;
    }
-    if (saveData.url != "" && typeof saveData.url === "string") {
-      saveData.url = saveData.url.split("\n");
+    if (saveData.info.url != "" && typeof saveData.url === "string") {
+      saveData.info.url = saveData.info.url.split("\n");
    } else {
-      saveData.url = [];
+      saveData.info.url = [];
    }
    if (saveData.engine == "ollama") {
-      saveData.type = 'llm'
-      saveData.params = {
+      saveData.type = 'local'
+      saveData.info.params = {
        top_p: 0.95,
        stream: true,
        num_keep: 5,
@@ -160,18 +164,18 @@ async function download() {
        temperature: 0.7,
      };
-      if (saveData.parameters != "" && typeof saveData.parameters === "string") {
-        saveData.parameters = saveData.parameters.split("\n");
+      if (saveData.info.parameters != "" && typeof saveData.info.parameters === "string") {
+        saveData.info.parameters = saveData.info.parameters.split("\n");
      } else {
-        saveData.parameters = [];
+        saveData.info.parameters = [];
      }
-      saveData.info = {
-        quant: saveData.quant,
-        context_length: saveData.context_length,
-        template: saveData.template,
-        parameters: saveData.parameters,
-        pb: saveData.pb.toUpperCase(),
-      };
+      // saveData.info = {
+      //   quant: saveData.quant,
+      //   context_length: saveData.context_length,
+      //   template: saveData.template,
+      //   parameters: saveData.parameters,
+      //   pb: saveData.pb.toUpperCase(),
+      // };
      const lowerName = saveData.info.pb.replace("B", "") * 1;
      if (lowerName < 3) {
        saveData.info.cpu = "8GB";
@@ -184,8 +188,8 @@ async function download() {
        saveData.info.cpu = "32GB";
        saveData.info.gpu = "12GB";
      }
-      if (saveData.model.indexOf(":") === -1) {
-        saveData.model = saveData.model + ":latest";
+      if (saveData.info.model.indexOf(":") === -1) {
+        saveData.info.model = saveData.info.model + ":latest";
      }
    }
  }
@@ -211,7 +215,7 @@ async function download() {
      </el-select>
    </el-form-item>
    <el-form-item :label="t('model.modelName')" v-if="formData.info.from !== 'local'">
-      <el-input v-model="formData.model" prefix-icon="House" clearable
+      <el-input v-model="formData.info.model" prefix-icon="House" clearable
        :placeholder="t('model.enterModelName')"></el-input>
    </el-form-item>
@@ -221,7 +225,7 @@ async function download() {
        @blur="getLocalModel"></el-input>
    </el-form-item>
    <el-form-item :label="t('model.selectModel')" v-if="localModels.length > 0">
-      <el-select v-model="formData.model" @change="setLocalInfo">
+      <el-select v-model="formData.info.model" @change="setLocalInfo">
      <el-option v-for="(item, key) in localModels" :key="key" :label="item.model" :value="item.model" />
    </el-select>
  </el-form-item>

46
frontend/src/components/ai/DownLabeleditor.vue

@@ -17,7 +17,7 @@ const labelData:any = ref({
  zhdesc: "",
  endesc: "",
  family: "",
-  action:[],
+  action: "",
})
async function save() {
  const saveData = toRaw(labelData.value)
@@ -33,8 +33,8 @@ async function save() {
    await db.update("modelslabel", props.labelId, saveData)
  } else {
    saveData.models = []
-    saveData.chanel = getSystemKey('currentChanel')
-    console.log(saveData)
+    //saveData.chanel = getSystemKey('currentChanel')
+    //console.log(saveData)
    await db.addOne("modelslabel", saveData)
  }
  emit("closeFn", false);
@@ -49,7 +49,7 @@ watchEffect(async () => {
      zhdesc: "",
      endesc: "",
      family: "",
-      action:[],
+      action: "",
    }
  }
@@ -58,47 +58,25 @@
<template>
  <el-form label-width="100px" style="margin-top:12px">
    <el-form-item :label="t('model.labelName')">
-      <el-input
-        v-model="labelData.name"
-        :placeholder="t('model.labelName')"
-        prefix-icon="House"
-        clearable
-        resize="none"
-      ></el-input>
+      <el-input v-model="labelData.name" :placeholder="t('model.labelName')" prefix-icon="House" clearable
+        resize="none"></el-input>
    </el-form-item>
    <el-form-item :label="t('model.family')">
-      <el-input
-        v-model="labelData.family"
-        :placeholder="t('model.family')"
-        prefix-icon="HomeFilled"
-        clearable
-        resize="none"
-      ></el-input>
+      <el-input v-model="labelData.family" :placeholder="t('model.family')" prefix-icon="HomeFilled" clearable
+        resize="none"></el-input>
    </el-form-item>
    <el-form-item :label="t('model.category')">
-      <el-select v-model="labelData.action" :multiple="true" :placeholder="t('model.selectCategory')">
-        <el-option
-          v-for="(item, key) in modelStore.cateList"
-          :key="key"
-          :label="t('model.'+item)"
-          :value="item"
-        />
+      <el-select v-model="labelData.action" :placeholder="t('model.selectCategory')">
+        <el-option v-for="(item, key) in modelStore.cateList" :key="key" :label="t('model.' + item)" :value="item" />
      </el-select>
    </el-form-item>
    <el-form-item :label="t('model.chineseDescription')">
-      <el-input
-        :placeholder="t('model.chineseDescription')"
-        v-model="labelData.zhdesc"
-      ></el-input>
+      <el-input :placeholder="t('model.chineseDescription')" v-model="labelData.zhdesc"></el-input>
    </el-form-item>
    <el-form-item :label="t('model.englishDescription')">
-      <el-input
-        :placeholder="t('model.englishDescription')"
-        :row="3"
-        v-model="labelData.endesc"
-      ></el-input>
+      <el-input :placeholder="t('model.englishDescription')" :row="3" v-model="labelData.endesc"></el-input>
    </el-form-item>
    <el-form-item>
      <el-button type="primary" icon="CirclePlus" @click="save">{{ t('common.save') }}</el-button>

4
frontend/src/components/ai/DownModelInfo.vue

@@ -30,13 +30,13 @@ const modelInfo = modelStore.getModelInfo(model);
  <el-row justify="space-around">
    <el-col :span="10" class="tc"><el-text>{{ t('model.modelEngine') }}</el-text></el-col>
    <el-col :span="14">
-      <el-tag type="primary">{{modelInfo.engine}}</el-tag>
+      <el-tag type="primary">{{modelInfo.info.engine}}</el-tag>
    </el-col>
  </el-row>
  <el-row justify="space-around" v-if="modelInfo.action">
    <el-col :span="10" class="tc"><el-text>{{ t('model.applicableScope') }}</el-text></el-col>
    <el-col :span="14">
-      <el-tag type="primary" v-for="item in modelInfo.action" style="margin-right: 5px;">{{t('model.' + item)}}</el-tag>
+      <el-tag type="primary">{{t('model.' + modelInfo.action)}}</el-tag>
    </el-col>
  </el-row>
  <el-row justify="space-around" v-if="modelInfo.info.context_length">

23
frontend/src/components/ai/aimodel.vue

@@ -39,7 +39,7 @@ async function downLabel(modelData: any, labelData: any) {
  modelData = toRaw(modelData);
  //console.log(modelData, labelData)
  const saveData = {
-    model: modelData.model,
+    model: modelData.info.model,
    label: labelData.name,
    action: labelData.action,
    engine: modelData.info.engine,
@@ -59,6 +59,7 @@ async function saveBox(modelData: any) {
    notifyError(t('model.chooseLabel'));
    return;
  }
+  //console.log(modelData)
  downLabel(modelData, labelData);
}
async function download(saveData: any) {
@@ -135,25 +136,15 @@ async function handleDown(modelData: any, completion: any) {
      modelData.status = msg.status;
      if (msg.total && msg.completed && msg.total > 0) {
-        if (msg.total == msg.completed) {
-          msg.status = "success"
-        } else {
        modelData.isLoading = 1;
        modelData.progress = Math.ceil((msg.completed / msg.total) * 100);
+        if (modelData.progress == 100 || msg.total == msg.completed) {
+          msg.status = "success"
        }
      } else {
        modelData.progress = 0;
      }
-      if (msg.status == "success") {
-        modelData.isLoading = 0;
-        modelData.progress = 0;
-      }
-      //console.log(modelData);
      await modelStore.updateDownload(modelData);
-      if (msg.status == "success") {
-        modelStore.deleteDownload(modelData.model);
-        modelStore.setCurrentModel(toRaw(modelData.action), modelData.model);
-      }
    } catch (error) {
      console.error("An error occurred:", error);
      break;
@@ -165,10 +156,10 @@ async function deleteModel(modelData: any) {
  modelData = toRaw(modelData);
  //console.log(modelData)
  try {
-    const res:any = await modelStore.deleteModelList(modelData);
-    notifySuccess(res);
+    await modelStore.deleteModelList(modelData);
+    notifySuccess(t('prompt.delSuccess'));
  } catch (error: any) {
-    console.log(error);
+    //console.log(error);
    notifyError(error.message);
  }
}

16
frontend/src/components/ai/aisetting.vue

@@ -61,7 +61,7 @@ const saveConfig = async () => {
  let postData: any = []
  if (config.value.dataDir.trim() != "") {
    postData.push({
-      name: "dataDir",
+      name: "aiDir",
      value: config.value.dataDir.trim(),
    })
  }
@@ -71,6 +71,12 @@ const saveConfig = async () => {
      value: config.value.ollamaUrl.trim(),
    })
  }
+  if (config.value.openaiUrl.trim() != "") {
+    postData.push({
+      name: "openaiUrl",
+      value: config.value.openaiUrl.trim(),
+    })
+  }
  if (postData.length > 0) {
    const postDatas = {
      method: "POST",
@@ -175,6 +181,12 @@ async function changeDir() {
        clearable></el-input>
    </div>
  </el-form-item>
+  <el-form-item label="OpenAI URL">
+    <div class="slider-container">
+      <el-input v-model="config.openaiUrl" placeholder="OpenAI URL" prefix-icon="Notification"
+        clearable></el-input>
+    </div>
+  </el-form-item>
  <el-form-item>
    <el-button @click="saveConfig" type="info" plain>
@@ -307,7 +319,7 @@ async function changeDir() {
}
.scrollbarSettingHeight {
-  height: 80vh;
+  height: 85vh;
  padding-bottom: 30px;
}

4
frontend/src/components/localchat/ChatDomain.vue

@@ -1,7 +1,7 @@
<template>
  <div class="win11-msg-container">
    <el-scrollbar>
-      <div v-if="store.navId < 2" class="user-list-area">
+      <div v-if="store.navId == 1" class="user-list-area">
        <el-row class="user-list" justify="space-around" v-for="(msg, key) in store.contentList" :key="key"
          v-if="store.contentList.length > 0">
          <!-- <el-col :span="5" class="avatar-col">
@@ -33,7 +33,7 @@
        <el-empty v-else :image-size="100" description="消息列表为空" />
      </div>
-      <div v-else class="user-list-area">
+      <div v-else-if="store.navId == 2" class="user-list-area">
        <el-row justify="space-between">
          <el-icon :size="18" @click="store.refreshUserList">
            <RefreshRight />

15
frontend/src/components/localchat/ChatNav.vue

@@ -2,6 +2,14 @@
  <el-row>
    <el-avatar shape="square" :size="40" class="userAvatar" src="/logo.png"/>
  </el-row>
+  <el-row @click="store.handleSelect(0)">
+    <el-icon v-if="store.navId === 0" class="menu-icon-on">
+      <Promotion />
+    </el-icon>
+    <el-icon v-else class="menu-icon">
+      <Position />
+    </el-icon>
+  </el-row>
  <el-row @click="store.handleSelect(1)">
    <el-icon v-if="store.navId === 1" class="menu-icon-on">
      <ChatLineRound />
@@ -18,13 +26,6 @@
      <User />
    </el-icon>
  </el-row>
-  <!-- <el-space direction="vertical" :size="20" class="win11-chat-nav">
-    <div :class="store.navId === item.index ? 'nav-item active' : 'nav-item'" v-for="item in store.navList" :key="item.index">
-      <el-icon size="18" @click="store.handleSelect(item.index)">
-        <component :is="item.icon" />
-      </el-icon>
-    </div>
-  </el-space> -->
</template>
<script setup lang="ts">

1
frontend/src/hook/useAi.ts

@@ -45,6 +45,7 @@ export async function askAi(question: any, action: string) {
        content: prompt
      },
    ],
+    engine: model.info.engine,
    model: model.model,
    stream: false,
    options: modelStore.chatConfig.creation,

1
frontend/src/i18n/lang/en.json

@@ -256,6 +256,7 @@
    "video": "Video",
    "embed": "Embeding",
    "embeddings": "Embeddings",
+    "reranker":"Reranker",
    "tts": "Txt2Audio",
    "audio": "Audio2Txt",
    "assistant": "Assistant",

10
frontend/src/i18n/lang/zh.json

@@ -259,6 +259,7 @@
    "recording": "听录",
    "video": "视频",
    "embeddings": "嵌入",
+    "reranker": "排序",
    "tts": "文字转声音",
    "audio": "声音转文字",
    "assistant": "助手",
@@ -272,14 +273,6 @@
    "downloading": "下载中",
    "modelLabel": "模型标签",
    "modelDown": "模型下载",
-    "help_label": "选择分类",
-    "help_labelDesc": "选择你想要下载的模型类目",
-    "help_showdown": "查看下载",
-    "help_showdownDesc": "点击此处查看下载列表和已下载的文件",
-    "help_adddown": "添加新的下载",
-    "help_adddownDesc": "点击此处可以添加新的下载,属于高级操作",
-    "help_addlabel": "添加新的标签",
-    "help_addlabelDesc": "点击此处可以添加新的标签,标签内没有下载列表方可删除",
    "labelName": "标签名称",
    "family": "家族",
    "category": "分类",
@ -361,5 +354,4 @@
"tips_num_predict": "num_predict通常是指在文本生成任务中,指定模型生成的token数量或预测步数。简单来说,就是指定了生成文本的长度,单位通常是token(可能是词、子词或其他单位,依据模型而定)。<br />最小值1:表示生成最少一个token的文本,这在实际应用中可能意义不大,除非模型的输出是高度结构化的,单个token也能构成完整信息。<br />最大值5000:最大生成长度,适合于生成较短的文本片段,如简短的回答、总结或短句。实际应用中,num_predict的具体取值范围应根据模型的能力、应用场景的需求以及资源限制(如计算成本和响应时间)来设定。对于需要生成较长文本的任务,比如文章创作、故事生成等,num_predict的上限可能会设置得更高,比如几百甚至上千。但需要注意的是,随着生成长度的增加,不仅计算成本会上升,生成文本的连贯性和质量控制也会变得更加复杂。", "tips_num_predict": "num_predict通常是指在文本生成任务中,指定模型生成的token数量或预测步数。简单来说,就是指定了生成文本的长度,单位通常是token(可能是词、子词或其他单位,依据模型而定)。<br />最小值1:表示生成最少一个token的文本,这在实际应用中可能意义不大,除非模型的输出是高度结构化的,单个token也能构成完整信息。<br />最大值5000:最大生成长度,适合于生成较短的文本片段,如简短的回答、总结或短句。实际应用中,num_predict的具体取值范围应根据模型的能力、应用场景的需求以及资源限制(如计算成本和响应时间)来设定。对于需要生成较长文本的任务,比如文章创作、故事生成等,num_predict的上限可能会设置得更高,比如几百甚至上千。但需要注意的是,随着生成长度的增加,不仅计算成本会上升,生成文本的连贯性和质量控制也会变得更加复杂。",
"tips_num_keep": "num_keep的值可以影响生成文本与原始输入的关联度和连贯性。较大的num_keep值有助于保持生成内容与输入的连续性和一致性,而较小的值则可能让模型生成更加自由、多变的文本。" "tips_num_keep": "num_keep的值可以影响生成文本与原始输入的关联度和连贯性。较大的num_keep值有助于保持生成内容与输入的连续性和一致性,而较小的值则可能让模型生成更加自由、多变的文本。"
} }
} }

25
frontend/src/stores/labels/bgereranker.ts

@ -0,0 +1,25 @@
export const bgeRerankerLabels = {
name: "bge-reranker",
family: "bge",
action: "reranker",
models: [
{
model: "linux6200/bge-reranker-v2-m3",
params: {
"num_ctx": 4096,
"temperature": 1
},
info: {
engine: "ollama",
from: "ollama",
size: "1.2GB",
desk: "2GB",
cpu: "8GB",
gpu: "6GB",
quant: "f16"
}
},
],
zhdesc: "bge-reranker是BAAI开发的排序模型",
endesc: "bge is an reranker model developed by BAAI"
}

2
frontend/src/stores/labels/index.ts

@@ -26,6 +26,7 @@ import { deepseekcoderLabels } from './deepseekcoder.ts'
import { starcoder2Labels } from './starcoder2.ts'
import { duckdbnsqlLabels } from './duckdbnsql.ts'
import { bgeLabels } from './bge.ts'
+import { bgeRerankerLabels } from './bgereranker.ts'
import { dmetaLabels } from './dmeta.ts'
import { nomicLabels } from './nomic.ts'
import { snowflakeLabels } from './snowflake.ts'
@@ -66,6 +67,7 @@ export const aiLabels = [
  starcoder2Labels,
  duckdbnsqlLabels,
  bgeLabels,
+  bgeRerankerLabels,
  dmetaLabels,
  nomicLabels,
  snowflakeLabels,

96
frontend/src/stores/labels/qwen.ts

@@ -5,6 +5,102 @@ export const qwenLabels = {
zhdesc: "Qwen是阿里云基于transformer的一系列大型语言模型,在大量数据上进行预训练,包括网络文本、书籍、代码等。",
endesc: "Qwen is a series of transformer-based large language models by Alibaba Cloud, pre-trained on a large volume of data, including web texts, books, code, etc.",
models: [
{
model: "qwen2.5:0.5b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "494MB",
desk: "1GB",
cpu: "8GB",
gpu: "6GB",
quant: "q4"
}
},
{
model: "qwen2.5:1.5b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "1.54GB",
desk: "1.6GB",
cpu: "8GB",
gpu: "6GB",
quant: "q4"
}
},
{
model: "qwen2.5:3b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "1.9GB",
desk: "2GB",
cpu: "16GB",
gpu: "8GB",
quant: "q4"
}
},
{
model: "qwen2.5:7b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "4.7GB",
desk: "5GB",
cpu: "16GB",
gpu: "8GB",
quant: "q4"
}
},
{
model: "qwen2:0.5b",
params: {

5
frontend/src/stores/localchat.ts

@@ -16,10 +16,6 @@ export const useLocalChatStore = defineStore('localChatStore', () => {
  const showChooseFile = ref(false)
  const currentPage = ref(1)
  const pageSize = ref(50)
-  const navList = ref([
-    { index: 1, lable: "消息列表", icon: "ChatDotRound", type: "success" },
-    { index: 2, lable: "用户列表", icon: "UserFilled", type: "info" },
-  ])
  const navId = ref(1)
  const sendInfo:any = ref()
  const chatTargetId = ref(0)
@@ -463,7 +459,6 @@
  }
  return {
    userList,
-    navList,
    sendInfo,
    navId,
    chatTargetId,

17
frontend/src/stores/model.ts

@@ -76,12 +76,14 @@ export const useModelStore = defineStore('modelStore', () => {
      if (existingModels.includes(d.model)) {
        d.isdef = 1
      }
+      if(d.action == ""){
+        d.action = "chat"
+      }
    });
    await db.clear("modelslist");
    await db.addAll("modelslist", data);
    modelList.value = data;
  }
-  // 重新获取所有模型列表
}
async function refreshOllama() {
@@ -159,13 +161,9 @@
    });
    await db.deleteByField("modelslist", "model", data.model)
    if (data.isdef * 1 == 1) {
-      await setCurrentModel(data.action, "")
+      await setDefModel(data.action)
    }
  }
-  //await db.delete("modelslist", data.id)
-  //await getModelList()
}
function checkDownload(name: string) {
@@ -178,7 +176,6 @@
  } else {
    updateDownload(data)
  }
-  return data
}
function deleteDownload(model: string) {
@@ -200,10 +197,12 @@
    isLoading: modelData.isLoading ?? 0,
  });
  if (modelData.status === "success") {
-    //await addDownList(modelData);
+    modelData.isLoading = 0;
+    modelData.progress = 0;
+    deleteDownload(modelData.model);
    await getModelList();
+    await setDefModel(modelData.action);
    await checkLabelData(modelData);
-    await setCurrentModel(modelData.action, modelData.model);
  }
}
}

34
frontend/src/stores/modelconfig.ts

@@ -1,25 +1,25 @@
-export const cateList: any = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings"]
+export const cateList: any = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings", "reranker"]
export const modelEngines = [
  {
    name: "ollama",
    cpp: "ollama",
    needQuant: true
  },
-  {
-    name: "llama",
-    cpp: "llama.cpp",
-    needQuant: true
-  },
-  {
-    name: "cortex",
-    cpp: "cortex.cpp",
-    needQuant: true
-  },
-  {
-    name: "llamafile",
-    cpp: "llamafile",
-    needQuant: false
-  },
+  // {
+  //   name: "llama",
+  //   cpp: "llama.cpp",
+  //   needQuant: true
+  // },
+  // {
+  //   name: "cortex",
+  //   cpp: "cortex.cpp",
+  //   needQuant: true
+  // },
+  // {
+  //   name: "llamafile",
+  //   cpp: "llamafile",
+  //   needQuant: false
+  // },
  {
    name: "sd",
    cpp: "stable-diffusion.cpp",
@@ -42,7 +42,7 @@ export const netEngines = [
    cpp: "gemini"
  },
  {
-    name: "giteeAI",
+    name: "GiteeAI",
    cpp: "giteeAI",
  },
  {

20
frontend/src/system/config.ts

@@ -157,6 +157,26 @@ export const getSystemConfig = (ifset = false) => {
  if (!config.aiUrl) {
    config.aiUrl = config.apiUrl
  }
+  if (!config.openaiUrl) {
+    config.openaiUrl = 'https://api.openai.com/v1'
+  }
+  if (!config.aiKey) {
+    config.aiKey = {
+      "openai": "",
+      "gitee": "",
+      // "google": "",
+      // "baidu": "",
+      // "ali": "",
+      // "tencent": "",
+      // "bigmodel": "",
+      // "xai": "",
+      // "azure": "",
+      // "stability": "",
+      // "claude": "",
+      // "groq": ""
+    }
+  }
  // 初始化桌面快捷方式列表,若本地存储中已存在则不进行覆盖
  if (!config.desktopList) {
    config.desktopList = [];

23
godo/model/api/gitee.go

@ -0,0 +1,23 @@
package api
import (
"fmt"
"godo/libs"
)
func GetGiteeChatUrl(model string) string {
return "https://ai.gitee.com/api/serverless/" + model + "/chat/completions"
}
func GetGiteeEmbeddingUrl(model string) string {
return "https://ai.gitee.com/api/serverless/" + model + "/embeddings"
}
func GetGiteeText2ImgUrl(model string) string {
return "https://ai.gitee.com/api/serverless/" + model + "/text-to-image"
}
func GetGiteeSecret() (string, error) {
secret, has := libs.GetConfig("giteeSecret")
if !has {
return "", fmt.Errorf("the gitee secret is not set")
}
return secret.(string), nil
}
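
The helpers in godo/model/api/gitee.go only build the Gitee AI serverless endpoint for a model and read the stored secret; they do not issue requests themselves. Below is a minimal sketch of how a caller might combine them; the wrapper function, Bearer header, and JSON payload are assumptions for illustration, only GetGiteeChatUrl and GetGiteeSecret come from this commit.

package model

import (
    "bytes"
    "net/http"

    "godo/model/api"
)

// newGiteeChatRequest is a hypothetical caller: api.GetGiteeChatUrl builds the
// serverless chat endpoint for the given model, and api.GetGiteeSecret reads
// the "giteeSecret" config entry. The auth scheme and body shape are assumed.
func newGiteeChatRequest(model string, body []byte) (*http.Request, error) {
    secret, err := api.GetGiteeSecret()
    if err != nil {
        return nil, err // "the gitee secret is not set"
    }
    req, err := http.NewRequest("POST", api.GetGiteeChatUrl(model), bytes.NewReader(body))
    if err != nil {
        return nil, err
    }
    req.Header.Set("Authorization", "Bearer "+secret)
    req.Header.Set("Content-Type", "application/json")
    return req, nil
}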

30
godo/model/api/openai.go

@ -0,0 +1,30 @@
package api
import (
"fmt"
"godo/libs"
)
// 获取 OpenAI 聊天 API 的 URL
func GetOpenAIChatUrl() string {
return "https://api.openai.com/v1/chat/completions"
}
// 获取 OpenAI 文本嵌入 API 的 URL
func GetOpenAIEmbeddingUrl() string {
return "https://api.openai.com/v1/embeddings"
}
// 获取 OpenAI 文本转图像 API 的 URL
func GetOpenAIText2ImgUrl() string {
return "https://api.openai.com/v1/images/generations"
}
// 获取 OpenAI 密钥
func GetOpenAISecret() (string, error) {
secret, has := libs.GetConfig("openaiSecret")
if !has {
return "", fmt.Errorf("the openai secret is not set")
}
return secret.(string), nil
}

42
godo/model/convert.go

@ -1,42 +0,0 @@
package model
import (
"godo/libs"
"net/http"
"os"
"strings"
)
func ConvertOllama(w http.ResponseWriter, r *http.Request, req ReqBody) {
modelFile := "FROM " + req.Info.Path[0] + "\n"
modelFile += `TEMPLATE """` + req.Info.Template + `"""`
if req.Info.Parameters != "" {
parameters := strings.Split(req.Info.Parameters, "\n")
for _, param := range parameters {
modelFile += "\nPARAMETER " + param
}
}
url := GetOllamaUrl() + "/api/create"
postParams := map[string]string{
"name": req.Model,
"modelfile": modelFile,
}
ForwardHandler(w, r, postParams, url, "POST")
modelDir, err := GetModelDir(req.Model)
if err != nil {
libs.ErrorMsg(w, "GetModelDir")
return
}
// modelFilePath := filepath.Join(modelDir, "Modelfile")
// if err := os.WriteFile(modelFilePath, []byte(modelFile), 0644); err != nil {
// ErrMsg("WriteFile", err, w)
// return
// }
err = os.RemoveAll(modelDir)
if err != nil {
libs.ErrorMsg(w, "Error removing directory")
return
}
}

2
godo/model/down.go

@@ -38,7 +38,7 @@ func Download(w http.ResponseWriter, r *http.Request) {
    reqBody := ReqBody{}
    err := json.NewDecoder(r.Body).Decode(&reqBody)
    if err != nil {
-        libs.ErrorMsg(w, "first Decode request body error")
+        libs.ErrorMsg(w, "first Decode request body error:"+err.Error())
        return
    }
    err = LoadConfig()

36
godo/model/libs.go

@@ -4,7 +4,6 @@ import (
    "encoding/json"
    "fmt"
    "godo/libs"
-    "log"
    "net/http"
    "net/url"
    "os"
@@ -54,17 +53,14 @@ func GetModelPath(urls string, model string, reqType string) (string, error) {
    // 构建完整的文件路径
    filePath := filepath.Join(modelDir, fileName)
    if reqType == "local" {
-        dir, err := getOModelsDir()
-        if err != nil {
-            return "", err
-        }
+        dir := GetOllamaModelDir()
        if strings.Contains(fileName, "sha256-") && len(fileName) == 71 {
            filePath = filepath.Join(dir, "blobs", fileName)
-            log.Printf("====filePath1: %s", filePath)
+            //log.Printf("====filePath1: %s", filePath)
        } else {
            opName := getOpName(model)
            filePath = filepath.Join(dir, "manifests", opName.Space, opName.LibPath, opName.Name, opName.Tag)
-            log.Printf("====filePath2: %s", filePath)
+            //log.Printf("====filePath2: %s", filePath)
        }
    }
    return filePath, nil
@@ -73,32 +69,34 @@ func Var(key string) string {
    return strings.Trim(strings.TrimSpace(os.Getenv(key)), "\"'")
}
func GetHfModelDir() (string, error) {
+    aiDir, ok := libs.GetConfig("aiDir")
+    if ok {
+        return aiDir.(string), nil
+    } else {
        dataDir := libs.GetDataDir()
-        return filepath.Join(dataDir, "hfmodels"), nil
+        return filepath.Join(dataDir, "aiModels"), nil
    }
+}
func GetOllamaModelDir() string {
-    // dataDir := libs.GetDataDir()
-    // return filepath.Join(dataDir, "models")
    if s := Var("OLLAMA_MODELS"); s != "" {
        return s
    }
-    home, err := os.UserHomeDir()
-    if err != nil {
-        panic(err)
-    }
+    home, _ := os.UserHomeDir()
    return filepath.Join(home, ".ollama", "models")
}
-func getOModelsDir() (string, error) {
-    return GetOllamaModelDir(), nil
-}
func GetOllamaUrl() string {
    if s := strings.TrimSpace(Var("OLLAMA_HOST")); s != "" {
        return s
    }
+    ollamaUrl, ok := libs.GetConfig("ollamaUrl")
+    if ok {
+        return ollamaUrl.(string)
+    } else {
        return "http://localhost:11434"
    }
+}
func ReplaceModelName(modelName string) string {
    reg := regexp.MustCompile(`[/\s:]`)
    return reg.ReplaceAllString(modelName, "")
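
After this change the Ollama endpoint is resolved in three steps: the OLLAMA_HOST environment variable wins, then the saved "ollamaUrl" config entry, then the built-in default. A standalone illustration of that precedence, not repository code; the function name and parameters here are invented for the example:

package main

import "fmt"

// resolveOllamaURL mirrors the lookup order of GetOllamaUrl: env var, then
// config entry, then default. env and conf stand in for os.Getenv/libs.GetConfig.
func resolveOllamaURL(env string, conf string, confOK bool) string {
    if env != "" {
        return env
    }
    if confOK {
        return conf
    }
    return "http://localhost:11434"
}

func main() {
    fmt.Println(resolveOllamaURL("", "http://192.168.1.10:11434", true)) // config wins when no env var
    fmt.Println(resolveOllamaURL("", "", false))                         // falls back to the default
}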

108
godo/model/op.go → godo/model/ollama.go

@@ -4,7 +4,6 @@ import (
    "bytes"
    "crypto/sha256"
    "encoding/json"
-    "errors"
    "fmt"
    "godo/libs"
    "io"
@@ -13,7 +12,6 @@ import (
    "os"
    "path/filepath"
    "regexp"
-    "sort"
    "strconv"
    "strings"
    "time"
@@ -101,64 +99,6 @@ func humanReadableSize(size int64) string {
    }
}
func Tagshandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
var reqBodies []ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok {
reqBodies = append(reqBodies, rb)
}
return true // 继续遍历
})
// 对reqBodies按CreatedAt降序排列
sort.Slice(reqBodies, func(i, j int) bool {
return reqBodies[i].CreatedAt.After(reqBodies[j].CreatedAt) // 降序排列
})
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func ShowHandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
model := r.URL.Query().Get("model")
if model == "" {
libs.ErrorMsg(w, "Model name is empty")
return
}
//log.Printf("ShowHandler: %s", model)
var reqBodies ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok && rb.Model == model {
reqBodies = rb
return false
}
return true
})
//log.Printf("ShowHandler: %s", reqBodies)
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func extractParameterSize(sizeStr string, model string) (float64, bool) {
    // 尝试直接从原始sizeStr中提取数字,包括小数
    if size, err := strconv.ParseFloat(strings.TrimSuffix(sizeStr, "B"), 64); err == nil {
@@ -414,13 +354,10 @@ func getOpName(model string) OmodelPath {
func getManifests(model string) ([]string, error) {
    res := []string{}
    opName := getOpName(model)
-    modelsDir, err := getOModelsDir()
-    if err != nil {
-        return res, fmt.Errorf("failed to get user home directory: %w", err)
-    }
+    modelsDir := GetOllamaModelDir()
    manifestsFile := filepath.Join(modelsDir, "manifests", opName.Space, opName.LibPath, opName.Name, opName.Tag)
    if !libs.PathExists(manifestsFile) {
-        return res, fmt.Errorf("failed to get manifests file: %w", err)
+        return res, fmt.Errorf("failed to get manifests file: %s", manifestsFile)
    }
    res = append(res, manifestsFile)
    var manifest ManifestV2
@@ -450,19 +387,50 @@
}
func GetBlobsPath(digest string) (string, error) {
-    dir, err := getOModelsDir()
-    if err != nil {
-        return "", err
-    }
+    dir := GetOllamaModelDir()
    // only accept actual sha256 digests
    pattern := "^sha256[:-][0-9a-fA-F]{64}$"
    re := regexp.MustCompile(pattern)
    if digest != "" && !re.MatchString(digest) {
-        return "", errors.New("invalid digest format")
+        return "", fmt.Errorf("invalid digest format")
    }
    digest = strings.ReplaceAll(digest, ":", "-")
    path := filepath.Join(dir, "blobs", digest)
    return path, nil
}
func ConvertOllama(w http.ResponseWriter, r *http.Request, req ReqBody) {
modelFile := "FROM " + req.Info.Path[0] + "\n"
modelFile += `TEMPLATE """` + req.Info.Template + `"""`
if req.Info.Parameters != "" {
parameters := strings.Split(req.Info.Parameters, "\n")
for _, param := range parameters {
modelFile += "\nPARAMETER " + param
}
}
url := GetOllamaUrl() + "/api/create"
postParams := map[string]string{
"name": req.Model,
"modelfile": modelFile,
}
ForwardHandler(w, r, postParams, url, "POST")
modelDir, err := GetModelDir(req.Model)
if err != nil {
libs.ErrorMsg(w, "GetModelDir")
return
}
// modelFilePath := filepath.Join(modelDir, "Modelfile")
// if err := os.WriteFile(modelFilePath, []byte(modelFile), 0644); err != nil {
// ErrMsg("WriteFile", err, w)
// return
// }
err = os.RemoveAll(modelDir)
if err != nil {
libs.ErrorMsg(w, "Error removing directory")
return
}
}
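
ConvertOllama (moved here from the deleted convert.go) registers a locally downloaded model with Ollama by assembling a Modelfile string in memory, posting it to the engine's /api/create endpoint, and then removing the temporary model directory. The following standalone sketch rebuilds that string the same way the function does, with made-up values for the path, template, and parameters:

package main

import (
    "fmt"
    "strings"
)

// Rebuilds the Modelfile text exactly as ConvertOllama concatenates it;
// only the FROM/TEMPLATE/PARAMETER layout comes from the code above,
// the sample inputs are hypothetical.
func main() {
    path := "/data/aiModels/qwen2.5-7b/model.gguf"     // assumed req.Info.Path[0]
    template := "{{ .System }}\n{{ .Prompt }}"         // assumed req.Info.Template
    parameters := "stop <|im_start|>\nstop <|im_end|>" // assumed req.Info.Parameters

    modelFile := "FROM " + path + "\n"
    modelFile += `TEMPLATE """` + template + `"""`
    if parameters != "" {
        for _, param := range strings.Split(parameters, "\n") {
            modelFile += "\nPARAMETER " + param
        }
    }
    fmt.Println(modelFile)
    // Output:
    // FROM /data/aiModels/qwen2.5-7b/model.gguf
    // TEMPLATE """{{ .System }}
    // {{ .Prompt }}"""
    // PARAMETER stop <|im_start|>
    // PARAMETER stop <|im_end|>
}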

61
godo/model/server.go

@@ -1,6 +1,7 @@
package model
import (
+    "encoding/json"
    "fmt"
    "godo/libs"
    "io"
@@ -8,8 +9,68 @@ import (
    "net/http"
    "os"
    "path/filepath"
+    "sort"
)
func Tagshandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
var reqBodies []ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok {
reqBodies = append(reqBodies, rb)
}
return true // 继续遍历
})
// 对reqBodies按CreatedAt降序排列
sort.Slice(reqBodies, func(i, j int) bool {
return reqBodies[i].CreatedAt.After(reqBodies[j].CreatedAt) // 降序排列
})
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func ShowHandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
model := r.URL.Query().Get("model")
if model == "" {
libs.ErrorMsg(w, "Model name is empty")
return
}
//log.Printf("ShowHandler: %s", model)
var reqBodies ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok && rb.Model == model {
reqBodies = rb
return false
}
return true
})
//log.Printf("ShowHandler: %s", reqBodies)
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
type DownserverStucct struct {
    Path string `json:"path"`
}

18
godo/sys/setting.go

@@ -51,14 +51,30 @@ func ConfigHandler(w http.ResponseWriter, r *http.Request) {
            }
            libs.SetConfig(req)
        }
+    } else if req.Name == "aiDir" {
+        aiPath := req.Value.(string)
+        aiDir, ok := libs.GetConfig("aiDir")
+        if !ok || aiDir != aiPath {
+            if !libs.PathExists(aiPath) {
+                libs.ErrorMsg(w, "The Path is not exists!")
+                return
            }
-    if req.Name == "webdavClient" {
+            err = os.Chmod(aiPath, 0755)
+            if err != nil {
+                libs.ErrorMsg(w, "The Path chmod is error!")
+                return
+            }
+            libs.SetConfig(req)
+        }
+    } else if req.Name == "webdavClient" {
        libs.SetConfig(req)
        err := webdav.InitWebdav()
        if err != nil {
            libs.ErrorMsg(w, "The webdav client init is error:"+err.Error())
            return
        }
+    } else {
+        libs.SetConfig(req)
    }
}
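
The new "aiDir" branch is the backend counterpart of the aisetting.vue change above, which now posts the models directory under the name "aiDir": the handler checks that the path exists, chmods it to 0755, and stores it with libs.SetConfig. A hedged sketch of the name/value item the frontend builds; the struct name and example path are assumptions, only the name/value pairing is taken from the diff:

package main

import (
    "encoding/json"
    "fmt"
)

// configItem mirrors the {name, value} objects pushed into postData in
// aisetting.vue; the exact request type on the Go side is not shown in this commit.
type configItem struct {
    Name  string `json:"name"`
    Value any    `json:"value"`
}

func main() {
    body, _ := json.Marshal(configItem{Name: "aiDir", Value: "/home/user/godoos/aiModels"})
    fmt.Println(string(body)) // {"name":"aiDir","value":"/home/user/godoos/aiModels"}
}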
