
change ai

master
godo 6 months ago
parent
commit
601f9c09ef
  1. frontend/src/components/ai/DownAddbox.vue (66)
  2. frontend/src/components/ai/DownLabeleditor.vue (84)
  3. frontend/src/components/ai/DownModelInfo.vue (4)
  4. frontend/src/components/ai/aimodel.vue (25)
  5. frontend/src/components/ai/aisetting.vue (16)
  6. frontend/src/components/localchat/ChatDomain.vue (4)
  7. frontend/src/components/localchat/ChatNav.vue (15)
  8. frontend/src/hook/useAi.ts (1)
  9. frontend/src/i18n/lang/en.json (1)
  10. frontend/src/i18n/lang/zh.json (14)
  11. frontend/src/stores/labels/bgereranker.ts (25)
  12. frontend/src/stores/labels/index.ts (2)
  13. frontend/src/stores/labels/qwen.ts (96)
  14. frontend/src/stores/localchat.ts (5)
  15. frontend/src/stores/model.ts (17)
  16. frontend/src/stores/modelconfig.ts (34)
  17. frontend/src/system/config.ts (26)
  18. godo/model/api/gitee.go (23)
  19. godo/model/api/openai.go (30)
  20. godo/model/convert.go (42)
  21. godo/model/down.go (2)
  22. godo/model/libs.go (40)
  23. godo/model/ollama.go (108)
  24. godo/model/server.go (61)
  25. godo/sys/setting.go (20)

66
frontend/src/components/ai/DownAddbox.vue

@ -6,10 +6,10 @@ import { t } from "@/i18n/index";
const modelStore = useModelStore();
const formInit = {
model: "",
labelId: "",
ip: "",
info: {
model: "",
url: "",
from: "ollama",
file_name: "",
@ -86,7 +86,7 @@ async function getLocalModel() {
}
function setLocalInfo() {
let modelData: any = localModels.value.find((item: any) => {
return item.model === formData.value.model;
return item.model === formData.value.info.model;
});
if (!modelData) {
notifyError(t('model.invalidModel'));
@ -116,42 +116,46 @@ async function download() {
return;
}
if (saveData.from == "ollama") {
if (saveData.model == "") {
if (saveData.info.from == "ollama") {
if (saveData.info.model == "") {
notifyError(t('model.labelNameEmpty'));
return;
}
if (saveData.model.indexOf(":") === -1) {
saveData.model = saveData.model + ":latest";
if (saveData.info.model.indexOf(":") === -1) {
saveData.info.model = saveData.info.model + ":latest";
}
if (saveData.info.url == "") {
saveData.info.url = []
}
saveData.info.context_length = 1024
}
if (saveData.from == "local") {
if (!saveData.url || saveData.url.length == 0) {
if (saveData.info.from == "local") {
if (!saveData.info.url || saveData.info.url.length == 0) {
notifyError(t('model.invalidModel'));
return;
}
}
if (saveData.from == "network") {
if (isNaN(saveData.context_length) || saveData.context_length < 1) {
if (saveData.info.from == "network") {
if (isNaN(saveData.context_length) || saveData.info.context_length < 1) {
notifyError(t('model.invalidContextLength'));
return;
}
saveData.context_length = saveData.context_length * 1;
saveData.info.context_length = saveData.info.context_length * 1;
if (saveData.url == "") {
if (saveData.info.url == "") {
notifyError(t('model.invalidModelUrl'));
return;
}
if (saveData.url != "" && typeof saveData.url === "string") {
saveData.url = saveData.url.split("\n");
if (saveData.info.url != "" && typeof saveData.url === "string") {
saveData.info.url = saveData.info.url.split("\n");
} else {
saveData.url = [];
saveData.info.url = [];
}
if (saveData.engine == "ollama") {
saveData.type = 'llm'
saveData.params = {
saveData.type = 'local'
saveData.info.params = {
top_p: 0.95,
stream: true,
num_keep: 5,
@ -160,18 +164,18 @@ async function download() {
temperature: 0.7,
};
if (saveData.parameters != "" && typeof saveData.parameters === "string") {
saveData.parameters = saveData.parameters.split("\n");
if (saveData.info.parameters != "" && typeof saveData.info.parameters === "string") {
saveData.info.parameters = saveData.info.parameters.split("\n");
} else {
saveData.parameters = [];
saveData.info.parameters = [];
}
saveData.info = {
quant: saveData.quant,
context_length: saveData.context_length,
template: saveData.template,
parameters: saveData.parameters,
pb: saveData.pb.toUpperCase(),
};
// saveData.info = {
// quant: saveData.quant,
// context_length: saveData.context_length,
// template: saveData.template,
// parameters: saveData.parameters,
// pb: saveData.pb.toUpperCase(),
// };
const lowerName = saveData.info.pb.replace("B", "") * 1;
if (lowerName < 3) {
saveData.info.cpu = "8GB";
@ -184,8 +188,8 @@ async function download() {
saveData.info.cpu = "32GB";
saveData.info.gpu = "12GB";
}
if (saveData.model.indexOf(":") === -1) {
saveData.model = saveData.model + ":latest";
if (saveData.info.model.indexOf(":") === -1) {
saveData.info.model = saveData.info.model + ":latest";
}
}
}
@ -211,7 +215,7 @@ async function download() {
</el-select>
</el-form-item>
<el-form-item :label="t('model.modelName')" v-if="formData.info.from !== 'local'">
<el-input v-model="formData.model" prefix-icon="House" clearable
<el-input v-model="formData.info.model" prefix-icon="House" clearable
:placeholder="t('model.enterModelName')"></el-input>
</el-form-item>
@ -221,7 +225,7 @@ async function download() {
@blur="getLocalModel"></el-input>
</el-form-item>
<el-form-item :label="t('model.selectModel')" v-if="localModels.length > 0">
<el-select v-model="formData.model" @change="setLocalInfo">
<el-select v-model="formData.info.model" @change="setLocalInfo">
<el-option v-for="(item, key) in localModels" :key="key" :label="item.model" :value="item.model" />
</el-select>
</el-form-item>
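These hunks move the per-model fields of the add-download form under a nested info object. A minimal sketch of the reshaped state and the ":latest" normalization applied in download(), assuming the field names shown in the diff (the interface and helper names are hypothetical):

interface ModelInfo {
    model: string;
    url: string | string[];          // textarea string before save, string[] after split("\n")
    from: "ollama" | "local" | "network";
    file_name: string;
    context_length?: number;
    parameters?: string | string[];
    params?: Record<string, unknown>;
    pb?: string;                     // parameter size such as "7B"
    cpu?: string;
    gpu?: string;
}

const formInit = {
    labelId: "",
    ip: "",
    info: {
        model: "",
        url: "",
        from: "ollama",
        file_name: "",
    } as ModelInfo,
};

// Mirror of the download() normalization: add ":latest" when no tag is given.
function normalizeOllamaModel(info: ModelInfo): ModelInfo {
    if (info.model !== "" && info.model.indexOf(":") === -1) {
        return { ...info, model: info.model + ":latest" };
    }
    return info;
}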

84
frontend/src/components/ai/DownLabeleditor.vue

@ -1,55 +1,55 @@
<script lang="ts" setup>
import {db} from "@/stores/db"
import { db } from "@/stores/db"
import { useModelStore } from "@/stores/model";
import {notifyError} from "@/util/msg";
import { notifyError } from "@/util/msg";
import { getSystemKey } from "@/system/config";
import { t } from "@/i18n/index";
import { watchEffect, ref, toRaw } from "vue";
const modelStore = useModelStore();
const props = defineProps(['labelId']);
const emit = defineEmits(["closeFn","refreshFn"]);
const emit = defineEmits(["closeFn", "refreshFn"]);
// function close() {
// emit("closeFn", false);
// }
const labelData:any = ref({
name : "",
zhdesc : "",
endesc : "",
const labelData: any = ref({
name: "",
zhdesc: "",
endesc: "",
family: "",
action:[],
action: "",
})
async function save() {
const saveData = toRaw(labelData.value)
if(saveData.name == "") {
if (saveData.name == "") {
notifyError(t('common.inputTitle'))
return;
}
if(saveData.action.length == 0){
if (saveData.action.length == 0) {
notifyError(t('model.selectCategory'))
return;
}
if(props.labelId > 0) {
if (props.labelId > 0) {
await db.update("modelslabel", props.labelId, saveData)
}else{
} else {
saveData.models = []
saveData.chanel = getSystemKey('currentChanel')
console.log(saveData)
//saveData.chanel = getSystemKey('currentChanel')
//console.log(saveData)
await db.addOne("modelslabel", saveData)
}
emit("closeFn", false);
emit("refreshFn", true);
}
watchEffect(async () => {
if(props.labelId > 0) {
if (props.labelId > 0) {
labelData.value = await db.getOne("modelslabel", props.labelId)
}else{
} else {
labelData.value = {
name : "",
zhdesc : "",
endesc : "",
name: "",
zhdesc: "",
endesc: "",
family: "",
action:[],
action: "",
}
}
@ -58,47 +58,25 @@ watchEffect(async () => {
<template>
<el-form label-width="100px" style="margin-top:12px">
<el-form-item :label="t('model.labelName')">
<el-input
v-model="labelData.name"
:placeholder="t('model.labelName')"
prefix-icon="House"
clearable
resize="none"
></el-input>
<el-input v-model="labelData.name" :placeholder="t('model.labelName')" prefix-icon="House" clearable
resize="none"></el-input>
</el-form-item>
<el-form-item :label="t('model.family')">
<el-input
v-model="labelData.family"
:placeholder="t('model.family')"
prefix-icon="HomeFilled"
clearable
resize="none"
></el-input>
<el-input v-model="labelData.family" :placeholder="t('model.family')" prefix-icon="HomeFilled" clearable
resize="none"></el-input>
</el-form-item>
<el-form-item :label="t('model.category')">
<el-select v-model="labelData.action" :multiple="true" :placeholder="t('model.selectCategory')">
<el-option
v-for="(item, key) in modelStore.cateList"
:key="key"
:label="t('model.'+item)"
:value="item"
/>
</el-select>
<el-select v-model="labelData.action" :placeholder="t('model.selectCategory')">
<el-option v-for="(item, key) in modelStore.cateList" :key="key" :label="t('model.' + item)" :value="item" />
</el-select>
</el-form-item>
<el-form-item :label="t('model.chineseDescription')">
<el-input
:placeholder="t('model.chineseDescription')"
v-model="labelData.zhdesc"
></el-input>
<el-input :placeholder="t('model.chineseDescription')" v-model="labelData.zhdesc"></el-input>
</el-form-item>
<el-form-item :label="t('model.englishDescription')">
<el-input
:placeholder="t('model.englishDescription')"
:row="3"
v-model="labelData.endesc"
></el-input>
<el-input :placeholder="t('model.englishDescription')" :row="3" v-model="labelData.endesc"></el-input>
</el-form-item>
<el-form-item>
<el-button type="primary" icon="CirclePlus" @click="save">{{ t('common.save') }}</el-button>
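The label's action field changes from a multi-select array to a single category string, and the chanel assignment is commented out. A small sketch of the resulting record and the check in save(), with the interface name assumed:

interface ModelLabel {
    name: string;
    family: string;
    action: string;      // single category such as "chat" or "reranker" (was string[])
    zhdesc: string;
    endesc: string;
    models?: unknown[];
}

// Validation mirroring save(): an empty string fails the same length check
// that previously guarded the empty array.
function validateLabel(label: ModelLabel): string | null {
    if (label.name === "") return "name is required";
    if (label.action.length === 0) return "category is required";
    return null;
}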

4
frontend/src/components/ai/DownModelInfo.vue

@ -30,13 +30,13 @@ const modelInfo = modelStore.getModelInfo(model);
<el-row justify="space-around">
<el-col :span="10" class="tc"><el-text>{{ t('model.modelEngine') }}</el-text></el-col>
<el-col :span="14">
<el-tag type="primary">{{modelInfo.engine}}</el-tag>
<el-tag type="primary">{{modelInfo.info.engine}}</el-tag>
</el-col>
</el-row>
<el-row justify="space-around" v-if="modelInfo.action">
<el-col :span="10" class="tc"><el-text>{{ t('model.applicableScope') }}</el-text></el-col>
<el-col :span="14">
<el-tag type="primary" v-for="item in modelInfo.action" style="margin-right: 5px;">{{t('model.' + item)}}</el-tag>
<el-tag type="primary">{{t('model.' + modelInfo.action)}}</el-tag>
</el-col>
</el-row>
<el-row justify="space-around" v-if="modelInfo.info.context_length">

25
frontend/src/components/ai/aimodel.vue

@ -39,7 +39,7 @@ async function downLabel(modelData: any, labelData: any) {
modelData = toRaw(modelData);
//console.log(modelData, labelData)
const saveData = {
model: modelData.model,
model: modelData.info.model,
label: labelData.name,
action: labelData.action,
engine: modelData.info.engine,
@ -59,6 +59,7 @@ async function saveBox(modelData: any) {
notifyError(t('model.chooseLabel'));
return;
}
//console.log(modelData)
downLabel(modelData, labelData);
}
async function download(saveData: any) {
@ -135,25 +136,15 @@ async function handleDown(modelData: any, completion: any) {
modelData.status = msg.status;
if (msg.total && msg.completed && msg.total > 0) {
if (msg.total == msg.completed) {
modelData.isLoading = 1;
modelData.progress = Math.ceil((msg.completed / msg.total) * 100);
if (modelData.progress == 100 || msg.total == msg.completed) {
msg.status = "success"
} else {
modelData.isLoading = 1;
modelData.progress = Math.ceil((msg.completed / msg.total) * 100);
}
} else {
modelData.progress = 0;
}
if (msg.status == "success") {
modelData.isLoading = 0;
modelData.progress = 0;
}
//console.log(modelData);
await modelStore.updateDownload(modelData);
if (msg.status == "success") {
modelStore.deleteDownload(modelData.model);
modelStore.setCurrentModel(toRaw(modelData.action), modelData.model);
}
} catch (error) {
console.error("An error occurred:", error);
break;
@ -165,10 +156,10 @@ async function deleteModel(modelData: any) {
modelData = toRaw(modelData);
//console.log(modelData)
try {
const res:any = await modelStore.deleteModelList(modelData);
notifySuccess(res);
await modelStore.deleteModelList(modelData);
notifySuccess(t('prompt.delSuccess'));
} catch (error: any) {
console.log(error);
//console.log(error);
notifyError(error.message);
}
}
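The download loop now flags success as soon as completed equals total and leaves the cleanup (resetting progress, switching the current model) to modelStore.updateDownload. A sketch of the reduced progress branch, with the message and state types assumed:

interface DownMsg { status: string; total?: number; completed?: number }
interface DownState { status: string; isLoading: number; progress: number }

function applyProgress(state: DownState, msg: DownMsg): void {
    state.status = msg.status;
    if (msg.total && msg.completed && msg.total > 0) {
        if (msg.total === msg.completed) {
            // A fully received payload is treated as success immediately.
            msg.status = "success";
        } else {
            state.isLoading = 1;
            state.progress = Math.ceil((msg.completed / msg.total) * 100);
        }
    } else {
        state.progress = 0;
    }
}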

16
frontend/src/components/ai/aisetting.vue

@ -61,7 +61,7 @@ const saveConfig = async () => {
let postData: any = []
if (config.value.dataDir.trim() != "") {
postData.push({
name: "dataDir",
name: "aiDir",
value: config.value.dataDir.trim(),
})
}
@ -71,6 +71,12 @@ const saveConfig = async () => {
value: config.value.ollamaUrl.trim(),
})
}
if (config.value.openaiUrl.trim() != "") {
postData.push({
name: "openaiUrl",
value: config.value.openaiUrl.trim(),
})
}
if (postData.length > 0) {
const postDatas = {
method: "POST",
@ -175,6 +181,12 @@ async function changeDir() {
clearable></el-input>
</div>
</el-form-item>
<el-form-item label="OpenAI URL">
<div class="slider-container">
<el-input v-model="config.openaiUrl" placeholder="OpenAI URL" prefix-icon="Notification"
clearable></el-input>
</div>
</el-form-item>
<el-form-item>
<el-button @click="saveConfig" type="info" plain>
@ -307,7 +319,7 @@ async function changeDir() {
}
.scrollbarSettingHeight {
height: 80vh;
height: 85vh;
padding-bottom: 30px;
}
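saveConfig() now posts the data directory under the key aiDir and adds an openaiUrl entry. A sketch of the name/value payload builder, with the config shape assumed:

interface SettingItem { name: string; value: string }

function buildSettingPayload(cfg: { dataDir: string; ollamaUrl: string; openaiUrl: string }): SettingItem[] {
    const postData: SettingItem[] = [];
    if (cfg.dataDir.trim() !== "") {
        // The backend key is "aiDir" even though the form field is still dataDir.
        postData.push({ name: "aiDir", value: cfg.dataDir.trim() });
    }
    if (cfg.ollamaUrl.trim() !== "") {
        postData.push({ name: "ollamaUrl", value: cfg.ollamaUrl.trim() });
    }
    if (cfg.openaiUrl.trim() !== "") {
        postData.push({ name: "openaiUrl", value: cfg.openaiUrl.trim() });
    }
    return postData;
}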

4
frontend/src/components/localchat/ChatDomain.vue

@ -1,7 +1,7 @@
<template>
<div class="win11-msg-container">
<el-scrollbar>
<div v-if="store.navId < 2" class="user-list-area">
<div v-if="store.navId == 1" class="user-list-area">
<el-row class="user-list" justify="space-around" v-for="(msg, key) in store.contentList" :key="key"
v-if="store.contentList.length > 0">
<!-- <el-col :span="5" class="avatar-col">
@ -33,7 +33,7 @@
<el-empty v-else :image-size="100" description="消息列表为空" />
</div>
<div v-else class="user-list-area">
<div v-else-if="store.navId == 2" class="user-list-area">
<el-row justify="space-between">
<el-icon :size="18" @click="store.refreshUserList">
<RefreshRight />

15
frontend/src/components/localchat/ChatNav.vue

@ -2,6 +2,14 @@
<el-row>
<el-avatar shape="square" :size="40" class="userAvatar" src="/logo.png"/>
</el-row>
<el-row @click="store.handleSelect(0)">
<el-icon v-if="store.navId === 0" class="menu-icon-on">
<Promotion />
</el-icon>
<el-icon v-else class="menu-icon">
<Position />
</el-icon>
</el-row>
<el-row @click="store.handleSelect(1)">
<el-icon v-if="store.navId === 1" class="menu-icon-on">
<ChatLineRound />
@ -18,13 +26,6 @@
<User />
</el-icon>
</el-row>
<!-- <el-space direction="vertical" :size="20" class="win11-chat-nav">
<div :class="store.navId === item.index ? 'nav-item active' : 'nav-item'" v-for="item in store.navList" :key="item.index">
<el-icon size="18" @click="store.handleSelect(item.index)">
<component :is="item.icon" />
</el-icon>
</div>
</el-space> -->
</template>
<script setup lang="ts">

1
frontend/src/hook/useAi.ts

@ -45,6 +45,7 @@ export async function askAi(question: any, action: string) {
content: prompt
},
],
engine: model.info.engine,
model: model.model,
stream: false,
options: modelStore.chatConfig.creation,
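askAi() now sends the engine taken from the model's nested info alongside the model name. A sketch of the request body, with the message role and surrounding types assumed:

function buildAskBody(
    model: { model: string; info: { engine: string } },
    prompt: string,
    options: Record<string, unknown>,
) {
    return {
        messages: [{ role: "user", content: prompt }],   // role assumed; the diff only shows content
        engine: model.info.engine,
        model: model.model,
        stream: false,
        options,
    };
}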

1
frontend/src/i18n/lang/en.json

@ -256,6 +256,7 @@
"video": "Video",
"embed": "Embeding",
"embeddings": "Embeddings",
"reranker":"Reranker",
"tts": "Txt2Audio",
"audio": "Audio2Txt",
"assistant": "Assistant",

14
frontend/src/i18n/lang/zh.json

@ -109,7 +109,7 @@
"whiteBoard": "白板",
"piceditor": "图片编辑",
"gantt": "甘特图",
"aiHelper":"AI助手",
"aiHelper": "AI助手",
"aiModule": "模型管理",
"browser": "浏览器",
"aiSetting": "AI设置",
@ -161,7 +161,7 @@
"installSuccess": "安装成功!",
"uninstallSuccess": "卸载成功",
"downloadError": "下载失败!",
"downloadSuccess":"下载成功!",
"downloadSuccess": "下载成功!",
"cantStream": "暂不支持流下载!",
"setting": "设置",
"stop": "停止",
@ -259,6 +259,7 @@
"recording": "听录",
"video": "视频",
"embeddings": "嵌入",
"reranker": "排序",
"tts": "文字转声音",
"audio": "声音转文字",
"assistant": "助手",
@ -272,14 +273,6 @@
"downloading": "下载中",
"modelLabel": "模型标签",
"modelDown": "模型下载",
"help_label": "选择分类",
"help_labelDesc": "选择你想要下载的模型类目",
"help_showdown": "查看下载",
"help_showdownDesc": "点击此处查看下载列表和已下载的文件",
"help_adddown": "添加新的下载",
"help_adddownDesc": "点击此处可以添加新的下载,属于高级操作",
"help_addlabel": "添加新的标签",
"help_addlabelDesc": "点击此处可以添加新的标签,标签内没有下载列表方可删除",
"labelName": "标签名称",
"family": "家族",
"category": "分类",
@ -361,5 +354,4 @@
"tips_num_predict": "num_predict通常是指在文本生成任务中,指定模型生成的token数量或预测步数。简单来说,就是指定了生成文本的长度,单位通常是token(可能是词、子词或其他单位,依据模型而定)。<br />最小值1:表示生成最少一个token的文本,这在实际应用中可能意义不大,除非模型的输出是高度结构化的,单个token也能构成完整信息。<br />最大值5000:最大生成长度,适合于生成较短的文本片段,如简短的回答、总结或短句。实际应用中,num_predict的具体取值范围应根据模型的能力、应用场景的需求以及资源限制(如计算成本和响应时间)来设定。对于需要生成较长文本的任务,比如文章创作、故事生成等,num_predict的上限可能会设置得更高,比如几百甚至上千。但需要注意的是,随着生成长度的增加,不仅计算成本会上升,生成文本的连贯性和质量控制也会变得更加复杂。",
"tips_num_keep": "num_keep的值可以影响生成文本与原始输入的关联度和连贯性。较大的num_keep值有助于保持生成内容与输入的连续性和一致性,而较小的值则可能让模型生成更加自由、多变的文本。"
}
}

25
frontend/src/stores/labels/bgereranker.ts

@ -0,0 +1,25 @@
export const bgeRerankerLabels = {
name: "bge-reranker",
family: "bge",
action: "reranker",
models: [
{
model: "linux6200/bge-reranker-v2-m3",
params: {
"num_ctx": 4096,
"temperature": 1
},
info: {
engine: "ollama",
from: "ollama",
size: "1.2GB",
desk: "2GB",
cpu: "8GB",
gpu: "6GB",
quant: "f16"
}
},
],
zhdesc: "bge-reranker是BAAI开发的排序模型",
endesc: "bge is an reranker model developed by BAAI"
}

2
frontend/src/stores/labels/index.ts

@ -26,6 +26,7 @@ import { deepseekcoderLabels } from './deepseekcoder.ts'
import { starcoder2Labels } from './starcoder2.ts'
import { duckdbnsqlLabels } from './duckdbnsql.ts'
import { bgeLabels } from './bge.ts'
import { bgeRerankerLabels } from './bgereranker.ts'
import { dmetaLabels } from './dmeta.ts'
import { nomicLabels } from './nomic.ts'
import { snowflakeLabels } from './snowflake.ts'
@ -66,6 +67,7 @@ export const aiLabels = [
starcoder2Labels,
duckdbnsqlLabels,
bgeLabels,
bgeRerankerLabels,
dmetaLabels,
nomicLabels,
snowflakeLabels,

96
frontend/src/stores/labels/qwen.ts

@ -5,6 +5,102 @@ export const qwenLabels = {
zhdesc: "Qwen是阿里云基于transformer的一系列大型语言模型,在大量数据上进行预训练,包括网络文本、书籍、代码等。",
endesc: "Qwen is a series of transformer-based large language models by Alibaba Cloud, pre-trained on a large volume of data, including web texts, books, code, etc.",
models: [
{
model: "qwen2.5:0.5b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "494MB",
desk: "1GB",
cpu: "8GB",
gpu: "6GB",
quant: "q4"
}
},
{
model: "qwen2.5:1.5b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "1.54GB",
desk: "1.6GB",
cpu: "8GB",
gpu: "6GB",
quant: "q4"
}
},
{
model: "qwen2.5:3b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "1.9GB",
desk: "2GB",
cpu: "16GB",
gpu: "8GB",
quant: "q4"
}
},
{
model: "qwen2.5:7b",
params: {
top_p: 0.95,
stream: true,
num_keep: 5,
num_predict: 1,
top_k: 40,
temperature: 0.7,
stop: [
"<|im_start|>",
"<|im_end|>"
]
},
info: {
engine: "ollama",
from: "ollama",
size: "4.7GB",
desk: "5GB",
cpu: "16GB",
gpu: "8GB",
quant: "q4"
}
},
{
model: "qwen2:0.5b",
params: {

5
frontend/src/stores/localchat.ts

@ -16,10 +16,6 @@ export const useLocalChatStore = defineStore('localChatStore', () => {
const showChooseFile = ref(false)
const currentPage = ref(1)
const pageSize = ref(50)
const navList = ref([
{ index: 1, lable: "消息列表", icon: "ChatDotRound", type: "success" },
{ index: 2, lable: "用户列表", icon: "UserFilled", type: "info" },
])
const navId = ref(1)
const sendInfo:any = ref()
const chatTargetId = ref(0)
@ -463,7 +459,6 @@ export const useLocalChatStore = defineStore('localChatStore', () => {
}
return {
userList,
navList,
sendInfo,
navId,
chatTargetId,
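With navList removed, navigation is driven directly by navId: ChatDomain.vue renders the message list for navId 1 and the user list for navId 2, and ChatNav.vue adds a new entry for index 0. A minimal sketch of the selection state the templates rely on (handleSelect's real body is not shown in this diff):

import { ref } from "vue";

// Hypothetical selection state: 0 = new nav entry, 1 = message list, 2 = user list.
const navId = ref(1);
function handleSelect(index: number) {
    navId.value = index;
}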

17
frontend/src/stores/model.ts

@ -76,12 +76,14 @@ export const useModelStore = defineStore('modelStore', () => {
if (existingModels.includes(d.model)) {
d.isdef = 1
}
if(d.action == ""){
d.action = "chat"
}
});
await db.clear("modelslist");
await db.addAll("modelslist", data);
modelList.value = data;
}
// 重新获取所有模型列表
}
async function refreshOllama() {
@ -159,13 +161,9 @@ export const useModelStore = defineStore('modelStore', () => {
});
await db.deleteByField("modelslist", "model", data.model)
if (data.isdef * 1 == 1) {
await setCurrentModel(data.action, "")
await setDefModel(data.action)
}
}
//await db.delete("modelslist", data.id)
//await getModelList()
}
function checkDownload(name: string) {
@ -178,7 +176,6 @@ export const useModelStore = defineStore('modelStore', () => {
} else {
updateDownload(data)
}
return data
}
function deleteDownload(model: string) {
@ -200,10 +197,12 @@ export const useModelStore = defineStore('modelStore', () => {
isLoading: modelData.isLoading ?? 0,
});
if (modelData.status === "success") {
//await addDownList(modelData);
modelData.isLoading = 0;
modelData.progress = 0;
deleteDownload(modelData.model);
await getModelList();
await setDefModel(modelData.action);
await checkLabelData(modelData);
await setCurrentModel(modelData.action, modelData.model);
}
}
}
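On a successful download the store now refreshes the model list, restores a default for the category, checks the label record and only then switches the current model. A sketch of that success branch, with the referenced store actions stubbed as no-ops (the real implementations live elsewhere in model.ts):

const deleteDownload = (_model: string) => {};
const getModelList = async () => {};
const setDefModel = async (_action: string) => {};
const checkLabelData = async (_data: unknown) => {};
const setCurrentModel = async (_action: string, _model: string) => {};

async function onDownloadSuccess(modelData: { model: string; action: string; isLoading: number; progress: number }) {
    modelData.isLoading = 0;
    modelData.progress = 0;
    deleteDownload(modelData.model);                          // drop the finished entry from the download list
    await getModelList();                                     // re-sync the stored model list
    await setDefModel(modelData.action);                      // keep a default model for the category
    await checkLabelData(modelData);                          // make sure a label record exists
    await setCurrentModel(modelData.action, modelData.model); // switch to the newly downloaded model
}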

34
frontend/src/stores/modelconfig.ts

@ -1,25 +1,25 @@
export const cateList: any = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings"]
export const cateList: any = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings", "reranker"]
export const modelEngines = [
{
name: "ollama",
cpp: "ollama",
needQuant: true
},
{
name: "llama",
cpp: "llama.cpp",
needQuant: true
},
{
name: "cortex",
cpp: "cortex.cpp",
needQuant: true
},
{
name: "llamafile",
cpp: "llamafile",
needQuant: false
},
// {
// name: "llama",
// cpp: "llama.cpp",
// needQuant: true
// },
// {
// name: "cortex",
// cpp: "cortex.cpp",
// needQuant: true
// },
// {
// name: "llamafile",
// cpp: "llamafile",
// needQuant: false
// },
{
name: "sd",
cpp: "stable-diffusion.cpp",
@ -42,7 +42,7 @@ export const netEngines = [
cpp: "gemini"
},
{
name: "giteeAI",
name: "GiteeAI",
cpp: "giteeAI",
},
{

26
frontend/src/system/config.ts

@ -148,15 +148,35 @@ export const getSystemConfig = (ifset = false) => {
'fourthEnd': '254'
}
}
if(!config.ollamaUrl) {
if (!config.ollamaUrl) {
config.ollamaUrl = 'http://localhost:11434'
}
if(!config.dataDir) {
if (!config.dataDir) {
config.dataDir = ''
}
if(!config.aiUrl){
if (!config.aiUrl) {
config.aiUrl = config.apiUrl
}
if (!config.openaiUrl) {
config.openaiUrl = 'https://api.openai.com/v1'
}
if (!config.aiKey) {
config.aiKey = {
"openai": "",
"gitee": "",
// "google": "",
// "baidu": "",
// "ali": "",
// "tencent": "",
// "bigmodel": "",
// "xai": "",
// "azure": "",
// "stability": "",
// "claude": "",
// "groq": ""
}
}
// 初始化桌面快捷方式列表,若本地存储中已存在则不进行覆盖
if (!config.desktopList) {
config.desktopList = [];
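getSystemConfig() now also seeds openaiUrl and an aiKey map with per-provider keys. A sketch of the default-filling, covering only the ai-related fields and the providers that are not commented out (the config type is assumed):

interface AiConfig {
    ollamaUrl?: string;
    dataDir?: string;
    aiUrl?: string;
    apiUrl?: string;
    openaiUrl?: string;
    aiKey?: Record<string, string>;
}

function applyAiDefaults(config: AiConfig): AiConfig {
    if (!config.ollamaUrl) config.ollamaUrl = "http://localhost:11434";
    if (!config.dataDir) config.dataDir = "";
    if (!config.aiUrl) config.aiUrl = config.apiUrl;
    if (!config.openaiUrl) config.openaiUrl = "https://api.openai.com/v1";
    if (!config.aiKey) {
        config.aiKey = { openai: "", gitee: "" };
    }
    return config;
}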

23
godo/model/api/gitee.go

@ -0,0 +1,23 @@
package api
import (
"fmt"
"godo/libs"
)
func GetGiteeChatUrl(model string) string {
return "https://ai.gitee.com/api/serverless/" + model + "/chat/completions"
}
func GetGiteeEmbeddingUrl(model string) string {
return "https://ai.gitee.com/api/serverless/" + model + "/embeddings"
}
func GetGiteeText2ImgUrl(model string) string {
return "https://ai.gitee.com/api/serverless/" + model + "/text-to-image"
}
func GetGiteeSecret() (string, error) {
secret, has := libs.GetConfig("giteeSecret")
if !has {
return "", fmt.Errorf("the gitee secret is not set")
}
return secret.(string), nil
}
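The new Go helpers build Gitee AI serverless endpoints from the model name. For reference, the same URL shapes expressed as TypeScript one-liners (illustration only; the Go code above is what the server uses):

const giteeChatUrl = (model: string) =>
    `https://ai.gitee.com/api/serverless/${model}/chat/completions`;
const giteeEmbeddingUrl = (model: string) =>
    `https://ai.gitee.com/api/serverless/${model}/embeddings`;
const giteeText2ImgUrl = (model: string) =>
    `https://ai.gitee.com/api/serverless/${model}/text-to-image`;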

30
godo/model/api/openai.go

@ -0,0 +1,30 @@
package api
import (
"fmt"
"godo/libs"
)
// 获取 OpenAI 聊天 API 的 URL
func GetOpenAIChatUrl() string {
return "https://api.openai.com/v1/chat/completions"
}
// 获取 OpenAI 文本嵌入 API 的 URL
func GetOpenAIEmbeddingUrl() string {
return "https://api.openai.com/v1/embeddings"
}
// 获取 OpenAI 文本转图像 API 的 URL
func GetOpenAIText2ImgUrl() string {
return "https://api.openai.com/v1/images/generations"
}
// 获取 OpenAI 密钥
func GetOpenAISecret() (string, error) {
secret, has := libs.GetConfig("openaiSecret")
if !has {
return "", fmt.Errorf("the openai secret is not set")
}
return secret.(string), nil
}

42
godo/model/convert.go

@ -1,42 +0,0 @@
package model
import (
"godo/libs"
"net/http"
"os"
"strings"
)
func ConvertOllama(w http.ResponseWriter, r *http.Request, req ReqBody) {
modelFile := "FROM " + req.Info.Path[0] + "\n"
modelFile += `TEMPLATE """` + req.Info.Template + `"""`
if req.Info.Parameters != "" {
parameters := strings.Split(req.Info.Parameters, "\n")
for _, param := range parameters {
modelFile += "\nPARAMETER " + param
}
}
url := GetOllamaUrl() + "/api/create"
postParams := map[string]string{
"name": req.Model,
"modelfile": modelFile,
}
ForwardHandler(w, r, postParams, url, "POST")
modelDir, err := GetModelDir(req.Model)
if err != nil {
libs.ErrorMsg(w, "GetModelDir")
return
}
// modelFilePath := filepath.Join(modelDir, "Modelfile")
// if err := os.WriteFile(modelFilePath, []byte(modelFile), 0644); err != nil {
// ErrMsg("WriteFile", err, w)
// return
// }
err = os.RemoveAll(modelDir)
if err != nil {
libs.ErrorMsg(w, "Error removing directory")
return
}
}

2
godo/model/down.go

@ -38,7 +38,7 @@ func Download(w http.ResponseWriter, r *http.Request) {
reqBody := ReqBody{}
err := json.NewDecoder(r.Body).Decode(&reqBody)
if err != nil {
libs.ErrorMsg(w, "first Decode request body error")
libs.ErrorMsg(w, "first Decode request body error:"+err.Error())
return
}
err = LoadConfig()

40
godo/model/libs.go

@ -4,7 +4,6 @@ import (
"encoding/json"
"fmt"
"godo/libs"
"log"
"net/http"
"net/url"
"os"
@ -54,17 +53,14 @@ func GetModelPath(urls string, model string, reqType string) (string, error) {
// 构建完整的文件路径
filePath := filepath.Join(modelDir, fileName)
if reqType == "local" {
dir, err := getOModelsDir()
if err != nil {
return "", err
}
dir := GetOllamaModelDir()
if strings.Contains(fileName, "sha256-") && len(fileName) == 71 {
filePath = filepath.Join(dir, "blobs", fileName)
log.Printf("====filePath1: %s", filePath)
//log.Printf("====filePath1: %s", filePath)
} else {
opName := getOpName(model)
filePath = filepath.Join(dir, "manifests", opName.Space, opName.LibPath, opName.Name, opName.Tag)
log.Printf("====filePath2: %s", filePath)
//log.Printf("====filePath2: %s", filePath)
}
}
return filePath, nil
@ -73,31 +69,33 @@ func Var(key string) string {
return strings.Trim(strings.TrimSpace(os.Getenv(key)), "\"'")
}
func GetHfModelDir() (string, error) {
dataDir := libs.GetDataDir()
return filepath.Join(dataDir, "hfmodels"), nil
aiDir, ok := libs.GetConfig("aiDir")
if ok {
return aiDir.(string), nil
} else {
dataDir := libs.GetDataDir()
return filepath.Join(dataDir, "aiModels"), nil
}
}
func GetOllamaModelDir() string {
// dataDir := libs.GetDataDir()
// return filepath.Join(dataDir, "models")
if s := Var("OLLAMA_MODELS"); s != "" {
return s
}
home, err := os.UserHomeDir()
if err != nil {
panic(err)
}
home, _ := os.UserHomeDir()
return filepath.Join(home, ".ollama", "models")
}
func getOModelsDir() (string, error) {
return GetOllamaModelDir(), nil
}
func GetOllamaUrl() string {
if s := strings.TrimSpace(Var("OLLAMA_HOST")); s != "" {
return s
}
return "http://localhost:11434"
ollamaUrl, ok := libs.GetConfig("ollamaUrl")
if ok {
return ollamaUrl.(string)
} else {
return "http://localhost:11434"
}
}
func ReplaceModelName(modelName string) string {
reg := regexp.MustCompile(`[/\s:]`)

108
godo/model/op.go → godo/model/ollama.go

@ -4,7 +4,6 @@ import (
"bytes"
"crypto/sha256"
"encoding/json"
"errors"
"fmt"
"godo/libs"
"io"
@ -13,7 +12,6 @@ import (
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
@ -101,64 +99,6 @@ func humanReadableSize(size int64) string {
}
}
func Tagshandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
var reqBodies []ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok {
reqBodies = append(reqBodies, rb)
}
return true // 继续遍历
})
// 对reqBodies按CreatedAt降序排列
sort.Slice(reqBodies, func(i, j int) bool {
return reqBodies[i].CreatedAt.After(reqBodies[j].CreatedAt) // 降序排列
})
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func ShowHandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
model := r.URL.Query().Get("model")
if model == "" {
libs.ErrorMsg(w, "Model name is empty")
return
}
//log.Printf("ShowHandler: %s", model)
var reqBodies ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok && rb.Model == model {
reqBodies = rb
return false
}
return true
})
//log.Printf("ShowHandler: %s", reqBodies)
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func extractParameterSize(sizeStr string, model string) (float64, bool) {
// 尝试直接从原始sizeStr中提取数字,包括小数
if size, err := strconv.ParseFloat(strings.TrimSuffix(sizeStr, "B"), 64); err == nil {
@ -414,13 +354,10 @@ func getOpName(model string) OmodelPath {
func getManifests(model string) ([]string, error) {
res := []string{}
opName := getOpName(model)
modelsDir, err := getOModelsDir()
if err != nil {
return res, fmt.Errorf("failed to get user home directory: %w", err)
}
modelsDir := GetOllamaModelDir()
manifestsFile := filepath.Join(modelsDir, "manifests", opName.Space, opName.LibPath, opName.Name, opName.Tag)
if !libs.PathExists(manifestsFile) {
return res, fmt.Errorf("failed to get manifests file: %w", err)
return res, fmt.Errorf("failed to get manifests file: %s", manifestsFile)
}
res = append(res, manifestsFile)
var manifest ManifestV2
@ -450,19 +387,50 @@ func getManifests(model string) ([]string, error) {
}
func GetBlobsPath(digest string) (string, error) {
dir, err := getOModelsDir()
if err != nil {
return "", err
}
dir := GetOllamaModelDir()
// only accept actual sha256 digests
pattern := "^sha256[:-][0-9a-fA-F]{64}$"
re := regexp.MustCompile(pattern)
if digest != "" && !re.MatchString(digest) {
return "", errors.New("invalid digest format")
return "", fmt.Errorf("invalid digest format")
}
digest = strings.ReplaceAll(digest, ":", "-")
path := filepath.Join(dir, "blobs", digest)
return path, nil
}
func ConvertOllama(w http.ResponseWriter, r *http.Request, req ReqBody) {
modelFile := "FROM " + req.Info.Path[0] + "\n"
modelFile += `TEMPLATE """` + req.Info.Template + `"""`
if req.Info.Parameters != "" {
parameters := strings.Split(req.Info.Parameters, "\n")
for _, param := range parameters {
modelFile += "\nPARAMETER " + param
}
}
url := GetOllamaUrl() + "/api/create"
postParams := map[string]string{
"name": req.Model,
"modelfile": modelFile,
}
ForwardHandler(w, r, postParams, url, "POST")
modelDir, err := GetModelDir(req.Model)
if err != nil {
libs.ErrorMsg(w, "GetModelDir")
return
}
// modelFilePath := filepath.Join(modelDir, "Modelfile")
// if err := os.WriteFile(modelFilePath, []byte(modelFile), 0644); err != nil {
// ErrMsg("WriteFile", err, w)
// return
// }
err = os.RemoveAll(modelDir)
if err != nil {
libs.ErrorMsg(w, "Error removing directory")
return
}
}

61
godo/model/server.go

@ -1,6 +1,7 @@
package model
import (
"encoding/json"
"fmt"
"godo/libs"
"io"
@ -8,8 +9,68 @@ import (
"net/http"
"os"
"path/filepath"
"sort"
)
func Tagshandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
var reqBodies []ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok {
reqBodies = append(reqBodies, rb)
}
return true // 继续遍历
})
// 对reqBodies按CreatedAt降序排列
sort.Slice(reqBodies, func(i, j int) bool {
return reqBodies[i].CreatedAt.After(reqBodies[j].CreatedAt) // 降序排列
})
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func ShowHandler(w http.ResponseWriter, r *http.Request) {
err := LoadConfig()
if err != nil {
libs.ErrorMsg(w, "Load config error")
return
}
model := r.URL.Query().Get("model")
if model == "" {
libs.ErrorMsg(w, "Model name is empty")
return
}
//log.Printf("ShowHandler: %s", model)
var reqBodies ReqBody
reqBodyMap.Range(func(key, value interface{}) bool {
rb, ok := value.(ReqBody)
if ok && rb.Model == model {
reqBodies = rb
return false
}
return true
})
//log.Printf("ShowHandler: %s", reqBodies)
// 设置响应内容类型为JSON
w.Header().Set("Content-Type", "application/json")
// 使用json.NewEncoder将reqBodies编码为JSON并写入响应体
if err := json.NewEncoder(w).Encode(reqBodies); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
type DownserverStucct struct {
Path string `json:"path"`
}

20
godo/sys/setting.go

@ -51,14 +51,30 @@ func ConfigHandler(w http.ResponseWriter, r *http.Request) {
}
libs.SetConfig(req)
}
}
if req.Name == "webdavClient" {
} else if req.Name == "aiDir" {
aiPath := req.Value.(string)
aiDir, ok := libs.GetConfig("aiDir")
if !ok || aiDir != aiPath {
if !libs.PathExists(aiPath) {
libs.ErrorMsg(w, "The Path is not exists!")
return
}
err = os.Chmod(aiPath, 0755)
if err != nil {
libs.ErrorMsg(w, "The Path chmod is error!")
return
}
libs.SetConfig(req)
}
} else if req.Name == "webdavClient" {
libs.SetConfig(req)
err := webdav.InitWebdav()
if err != nil {
libs.ErrorMsg(w, "The webdav client init is error:"+err.Error())
return
}
} else {
libs.SetConfig(req)
}
}
