Browse Source

add ai model

master
godo 7 months ago
parent
commit
1e50d0eb82
  1. 75
      frontend/src/components/ai/aisetting.vue
  2. 333
      frontend/src/components/window/IframeFile.vue
  3. 1
      frontend/src/i18n/lang/en.json
  4. 1
      frontend/src/i18n/lang/zh.json
  5. 47
      frontend/src/stores/model.ts
  6. 11
      godo/libs/dir.go
  7. 28
      godo/model/chat.go
  8. 50
      godo/model/common.go
  9. 46
      godo/model/convert.go
  10. 155
      godo/model/data.go
  11. 258
      godo/model/down.go
  12. 118
      godo/model/libs.go
  13. 396
      godo/model/op.go
  14. 55
      godo/model/server.go
  15. 91
      godo/model/types.go

75
frontend/src/components/ai/aisetting.vue

@ -25,20 +25,20 @@ const modelStore = useModelStore();
// top_p top-k0.950.50.9 float top_p 0.9
const hoverTxt = {
dataDir: t('setting.tips_dataDir'),
apiUrl: t('setting.tips_apiUrl'),
dataDir: t('aisetting.tips_dataDir'),
apiUrl: t('aisetting.tips_apiUrl'),
contextLength: t("setting.tips_contextLength"),
top_k: t('setting.tips_top_k'),
top_p: t('setting.tips_top_p'),
temperature: t('setting.tips_temperature'),
frequency_penalty: t('setting.tips_frequency_penalty'),
presence_penalty: t('setting.tips_presence_penalty'),
num_predict: t('setting.tips_num_predict'),
num_keep: t('setting.tips_num_keep'),
top_k: t('aisetting.tips_top_k'),
top_p: t('aisetting.tips_top_p'),
temperature: t('aisetting.tips_temperature'),
frequency_penalty: t('aisetting.tips_frequency_penalty'),
presence_penalty: t('aisetting.tips_presence_penalty'),
num_predict: t('aisetting.tips_num_predict'),
num_keep: t('aisetting.tips_num_keep'),
};
// const systemStore = useSystemStore();
const config: any = ref({});
const chatConfig: any = ref({});
//const chatConfig: any = ref({});
const currentsModel: any = ref({});
const modelList = ref([]);
const pageLoading = ref(true);
@ -70,7 +70,7 @@ const saveConfig = async () => {
if (config.value.IpList.trim() != "") {
postData.push({
name: "dataDir",
value: config.value.IpList.trim().split("\n").map((line:string) => line.trim()),
value: config.value.IpList.trim().split("\n").map((line: string) => line.trim()),
})
}
if (postData.length > 0) {
@ -96,10 +96,7 @@ const saveConfig = async () => {
//modelStore.updateCurrentModels(modelList.value);
notifySuccess(t('common.saveSuccess'));
};
const saveChatConfig = () => {
//setChatConfig(chatConfig.value);
notifySuccess(t('common.saveSuccess'));
};
const changeConfig = async () => {
//console.log(v)
//setSystemKey('llmType',v)
@ -125,12 +122,12 @@ async function changeDir() {
<template>
<div v-loading="pageLoading">
<el-tabs v-model="activeName" class="setting-tabs" style="margin: 12px" @tab-click="handleClick">
<el-tab-pane :label="t('setting.modelSetting')" name="system">
<el-tab-pane :label="t('aisetting.modelSetting')" name="system">
<el-scrollbar class="scrollbarSettingHeight">
<el-form label-width="150px" style="padding: 0 30px 50px 0">
<el-form-item :label="t('setting.dataDir')">
<el-form-item :label="t('aisetting.dataDir')">
<div class="slider-container">
<el-input v-model="config.dataDir" :placeholder="t('setting.localDirHolder')" prefix-icon="Menu"
<el-input v-model="config.dataDir" :placeholder="t('aisetting.localDirHolder')" prefix-icon="Menu"
@click="changeDir()" clearable></el-input>
<el-popover placement="left" :width="400" trigger="click">
<template #reference>
@ -144,9 +141,9 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item :label="t('setting.serverUrl')">
<el-form-item :label="t('aisetting.serverUrl')">
<div class="slider-container">
<el-input v-model="config.apiUrl" :placeholder="t('setting.serverUrl')" prefix-icon="Notification"
<el-input v-model="config.apiUrl" :placeholder="t('aisetting.serverUrl')" prefix-icon="Notification"
clearable></el-input>
<el-popover placement="left" :width="400" trigger="click">
<template #reference>
@ -160,16 +157,13 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item :label="t('setting.ipSetting')">
<el-form-item :label="t('aisetting.ollamaUrl')">
<div class="slider-container">
<el-input v-model="config.ipList"
:placeholder="t('setting.ipHolder')"
:rows="3"
type="textarea"
prefix-icon="Notification"
<el-input v-model="config.ollamaUrl" :placeholder="t('aisetting.ollamaUrl')" prefix-icon="Notification"
clearable></el-input>
</div>
</el-form-item>
<el-form-item>
<el-button @click="saveConfig" type="info" plain>
{{ t("common.confim") }}
@ -178,23 +172,23 @@ async function changeDir() {
</el-form>
</el-scrollbar>
</el-tab-pane>
<el-tab-pane :label="t('setting.defModel')" name="modelDef">
<el-tab-pane :label="t('aisetting.defModel')" name="modelDef">
<el-scrollbar class="scrollbarSettingHeight">
<el-form label-width="150px" style="padding: 0 30px 50px 0">
<el-form-item :label="t('model.' + item)" v-for="(item, index) in config.modelCate" :key="index">
<el-form-item :label="t('model.' + item)" v-for="(item, index) in modelStore.cateList" :key="index">
<el-select v-model="currentsModel[item]" @change="(val: any) => modelStore.setCurrentModel(item, val)">
<el-option v-for="(el, key) in modelStore.getCurrentModelList(modelList, item)" :key="key" :label="el.model"
:value="el.model" />
<el-option v-for="(el, key) in modelStore.getCurrentModelList(modelList, item)" :key="key"
:label="el.model" :value="el.model" />
</el-select>
</el-form-item>
</el-form>
</el-scrollbar>
</el-tab-pane>
<el-tab-pane :label="t('setting.chatSetting')" name="chatSetting">
<el-tab-pane :label="t('aisetting.chatSetting')" name="chatSetting">
<el-tabs tab-position="left" v-model="activeModel" style="height: 490px" class="setting-tabs">
<el-tab-pane :name="item.key" :label="t('model.' + item.key)" v-for="item in chatConfig">
<el-tab-pane :name="item.key" :label="t('model.' + item.key)" v-for="item in modelStore.chatConfig">
<el-form label-width="100px" style="width: 500px">
<el-form-item :label="t('setting.contextLength')" v-if="item.contextLength" class="inline-layout">
<el-form-item :label="t('aisetting.contextLength')" v-if="item.contextLength" class="inline-layout">
<div class="slider-container">
<el-slider v-model="item.contextLength" :max="10" :min="1" :step="1" />
<el-popover placement="left" :width="400" trigger="click">
@ -210,7 +204,7 @@ async function changeDir() {
</div>
</el-form-item>
<el-form-item :label="t('setting.num_predict')" class="inline-layout">
<el-form-item :label="t('aisetting.num_predict')" class="inline-layout">
<div class="slider-container">
<el-slider v-model="item.num_predict" :max="5000" :min="1" />
<el-popover placement="left" :width="400" trigger="click">
@ -225,7 +219,7 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item :label="t('setting.num_keep')" class="inline-layout">
<el-form-item :label="t('aisetting.num_keep')" class="inline-layout">
<div class="slider-container">
<el-slider v-model="item.num_keep" :max="500" :min="1" />
<el-popover placement="left" :width="400" trigger="click">
@ -240,7 +234,7 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item :label="t('setting.top_k')" class="inline-layout">
<el-form-item :label="t('aisetting.top_k')" class="inline-layout">
<div class="slider-container">
<el-slider v-model="item.top_k" :max="100" :min="1" />
<el-popover placement="left" :width="400" trigger="click">
@ -255,7 +249,7 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item :label="t('setting.top_p')" class="inline-layout">
<el-form-item :label="t('aisetting.top_p')" class="inline-layout">
<div class="slider-container">
<el-slider v-model="item.top_p" :max="1" :min="0.01" :step="0.01" />
<el-popover placement="left" :width="400" trigger="click">
@ -270,7 +264,7 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item :label="t('setting.temperature')" class="inline-layout">
<el-form-item :label="t('aisetting.temperature')" class="inline-layout">
<div class="slider-container">
<el-slider v-model="item.temperature" :max="0.99" :min="0.01" :step="0.01" />
<el-popover placement="left" :width="400" trigger="click">
@ -285,11 +279,6 @@ async function changeDir() {
</el-popover>
</div>
</el-form-item>
<el-form-item>
<el-button @click="saveChatConfig" type="info" plain>
{{ t("common.confim") }}
</el-button>
</el-form-item>
</el-form>
</el-tab-pane>
</el-tabs>

333
frontend/src/components/window/IframeFile.vue

@ -1,189 +1,184 @@
<template>
<iframe
class="setiframe"
allow="fullscreen"
ref="storeRef"
:src="src"
></iframe>
<iframe class="setiframe" allow="fullscreen" ref="storeRef" :src="src"></iframe>
</template>
<script lang="ts" setup name="IframeFile">
//@ts-ignore
import { BrowserWindow, Dialog, Notify, System } from "@/system";
import { getSplit, getSystemConfig, setSystemKey } from "@/system/config";
import { base64ToBuffer, isBase64 } from "@/util/file";
import { isShareFile } from "@/util/sharePath.ts";
import { inject, onMounted, onUnmounted, ref, toRaw } from "vue";
const SP = getSplit();
//@ts-ignore
import { BrowserWindow, Dialog, Notify, System } from "@/system";
import { getSplit, getSystemConfig, setSystemKey } from "@/system/config";
import { base64ToBuffer, isBase64 } from "@/util/file";
import { isShareFile } from "@/util/sharePath.ts";
import { inject, onMounted, onUnmounted, ref, toRaw } from "vue";
const SP = getSplit();
const sys: any = inject<System>("system");
const win: any = inject<BrowserWindow>("browserWindow");
const props = defineProps({
src: {
type: String,
default: "",
},
eventType: {
type: String,
default: "",
},
ext: {
type: String,
default: "md",
},
});
//console.log('iframe: ', props);
const sys: any = inject<System>("system");
const win: any = inject<BrowserWindow>("browserWindow");
const props = defineProps({
src: {
type: String,
default: "",
},
eventType: {
type: String,
default: "",
},
ext: {
type: String,
default: "md",
},
});
//console.log('iframe: ', props);
//console.log(props);
//let path = win?.config?.path;
// let currentPath = ref('')
const storeRef = ref<HTMLIFrameElement | null>(null);
let hasInit = false;
const eventHandler = async (e: MessageEvent) => {
const eventData = e.data;
//console.log(props);
//let path = win?.config?.path;
// let currentPath = ref('')
const storeRef = ref<HTMLIFrameElement | null>(null);
let hasInit = false;
const eventHandler = async (e: MessageEvent) => {
const eventData = e.data;
if (eventData.type == props.eventType) {
let data = JSON.parse(eventData.data);
let title = data.title;
let path;
let ext: any = props.ext;
if (ext instanceof Array) {
ext = ext[0];
}
if (data.ext) {
ext = data.ext;
}
// console.log(ext)
// console.log(data)
if (win.config && win.config.path) {
path = win.config.path;
//1
// let fileTitleArr = path.split(SP).pop().split(".");
// let oldExt = fileTitleArr.pop();
// let fileTitle = fileTitleArr.join(".");
// if (fileTitle != title) {
// path = path.replace(fileTitle, title);
// }
// if (oldExt != ext) {
// path = path.replace("." + oldExt, "." + ext);
// }
} else {
path = `${SP}C${SP}Users${SP}Desktop${SP}${title}.${ext}`;
}
//
const isShare = ref(false);
const isWrite = ref(0);
if (isShareFile(path)) {
const file = await sys?.fs.getShareInfo(path);
isShare.value = true;
isWrite.value = file.fs.sender === getSystemConfig().userInfo.id ? 1 : file.fs.is_write;
if (
!isWrite.value &&
file.fs.sender !== getSystemConfig().userInfo.id
) {
new Notify({
title: "提示",
content: "该文件没有编辑权限",
});
return;
}
} else if (await sys?.fs.exists(path)) {
let res = await Dialog.showMessageBox({
type: "info",
if (eventData.type == props.eventType) {
let data = JSON.parse(eventData.data);
let title = data.title;
let path;
let ext: any = props.ext;
if (ext instanceof Array) {
ext = ext[0];
}
if (data.ext) {
ext = data.ext;
}
// console.log(ext)
// console.log(data)
if (win.config && win.config.path) {
path = win.config.path;
//1
// let fileTitleArr = path.split(SP).pop().split(".");
// let oldExt = fileTitleArr.pop();
// let fileTitle = fileTitleArr.join(".");
// if (fileTitle != title) {
// path = path.replace(fileTitle, title);
// }
// if (oldExt != ext) {
// path = path.replace("." + oldExt, "." + ext);
// }
} else {
path = `${SP}C${SP}Users${SP}Desktop${SP}${title}.${ext}`;
}
//
const isShare = ref(false);
const isWrite = ref(0);
if (isShareFile(path)) {
const file = await sys?.fs.getShareInfo(path);
isShare.value = true;
isWrite.value = file.fs.sender === getSystemConfig().userInfo.id ? 1 : file.fs.is_write;
if (
!isWrite.value &&
file.fs.sender !== getSystemConfig().userInfo.id
) {
new Notify({
title: "提示",
message: "存在相同的文件名-" + title,
buttons: ["覆盖文件?", "取消"],
content: "该文件没有编辑权限",
});
//console.log(res)
if (res.response > 0) {
return;
}
}
if (typeof data.content === "string") {
if (data.content.indexOf(";base64,") > -1) {
const parts = data.content.split(";base64,");
data.content = parts[1];
}
if (isBase64(data.content)) {
data.content = base64ToBuffer(data.content);
//console.log(data.content)
}
return;
}
const res = isShare.value
? await sys?.fs.writeShareFile(
path,
data.content,
isWrite.value
)
: await sys?.fs.writeFile(path, data.content);
// console.log("", res, isShare);
new Notify({
} else if (await sys?.fs.exists(path)) {
let res = await Dialog.showMessageBox({
type: "info",
title: "提示",
content: res.message
// content: res.code === 0 ? "" : res.message,
message: "存在相同的文件名-" + title,
buttons: ["覆盖文件?", "取消"],
});
sys.refershAppList();
} else if (eventData.type == "initSuccess") {
if (hasInit) {
//console.log(res)
if (res.response > 0) {
return;
}
hasInit = true;
let content = win?.config?.content;
let title = win.getTitle();
// console.log("win.config;", win?.config);
// console.log(title);
title = title.split(SP).pop();
if (!content && win?.config.path) {
const file = getSystemConfig().file;
const header = {
salt: file.salt,
pwd: file.pwd,
};
content = await sys?.fs.readFile(win?.config.path, header);
}
if (typeof data.content === "string") {
if (data.content.indexOf(";base64,") > -1) {
const parts = data.content.split(";base64,");
data.content = parts[1];
}
content = toRaw(content);
if (content && content !== "") {
storeRef.value?.contentWindow?.postMessage(
{
type: "init",
data: { content, title },
},
"*"
);
} else {
storeRef.value?.contentWindow?.postMessage(
{
type: "start",
title,
},
"*"
);
if (isBase64(data.content)) {
data.content = base64ToBuffer(data.content);
//console.log(data.content)
}
}
};
//
const delFileInputPwd = async () => {
let fileInputPwd = getSystemConfig().fileInputPwd;
const currentPath = win.config.path;
const temp = fileInputPwd.filter(
(item: any) => item.path !== currentPath
);
setSystemKey("fileInputPwd", temp);
};
onMounted(() => {
window.addEventListener("message", eventHandler);
});
onUnmounted(async () => {
await delFileInputPwd();
window.removeEventListener("message", eventHandler);
});
const res = isShare.value
? await sys?.fs.writeShareFile(
path,
data.content,
isWrite.value
)
: await sys?.fs.writeFile(path, data.content);
// console.log("", res, isShare);
new Notify({
title: "提示",
content: res.message
// content: res.code === 0 ? "" : res.message,
});
sys.refershAppList();
} else if (eventData.type == "initSuccess") {
if (hasInit) {
return;
}
hasInit = true;
let content = win?.config?.content;
let title = win.getTitle();
// console.log("win.config;", win?.config);
// console.log(title);
title = title.split(SP).pop();
if (!content && win?.config.path) {
const file = getSystemConfig().file;
const header = {
salt: file.salt,
pwd: file.pwd,
};
content = await sys?.fs.readFile(win?.config.path, header);
}
content = toRaw(content);
if (content && content !== "") {
storeRef.value?.contentWindow?.postMessage(
{
type: "init",
data: { content, title },
},
"*"
);
} else {
storeRef.value?.contentWindow?.postMessage(
{
type: "start",
title,
},
"*"
);
}
}
};
//
const delFileInputPwd = async () => {
let fileInputPwd = getSystemConfig().fileInputPwd;
const currentPath = win.config.path;
const temp = fileInputPwd.filter(
(item: any) => item.path !== currentPath
);
setSystemKey("fileInputPwd", temp);
};
onMounted(() => {
window.addEventListener("message", eventHandler);
});
onUnmounted(async () => {
await delFileInputPwd();
window.removeEventListener("message", eventHandler);
});
</script>
<style scoped>
.setiframe {
width: 100%;
height: 100%;
border: none;
}
.setiframe {
width: 100%;
height: 100%;
border: none;
}
</style>

1
frontend/src/i18n/lang/en.json

@ -327,6 +327,7 @@
"defModel": "Default Model",
"chatSetting": "Chat Setting",
"serverUrl": "Server Url",
"ollamaUrl": "Ollama Url",
"chatModel": "ChatModel",
"eyeModel": "EyeModel",
"transModel": "TranslationModel",

1
frontend/src/i18n/lang/zh.json

@ -330,6 +330,7 @@
"chatSetting": "对话设置",
"systemSetting": "系统设置",
"serverUrl": "服务器地址",
"ollamaUrl": "Ollama地址",
"chatModel": "聊天模型",
"eyeModel": "视觉模型",
"transModel": "翻译模型",

47
frontend/src/stores/model.ts

@ -45,6 +45,51 @@ export const useModelStore = defineStore('modelStore', () => {
const cateList: any = ["chat", "translation", "code", "img2txt", "image", "tts", "audio", "embeddings"]
const modelList: any = ref([])
const downList: any = ref([])
// Per-feature default generation settings, keyed by feature name.
// Persisted to localStorage (see the persist "paths" list in this store)
// so user adjustments survive reloads.
const chatConfig: any = ref({
    chat: {
        key: "chat",
        contextLength: 10,
        num_keep: 5, // how many most-likely candidates to keep; together with top_k this bounds the vocabulary considered for the next token
        num_predict: 3, // how many predictions to generate
        top_p: 0.95,
        top_k: 40, // affects randomness; a higher top_k lets the model consider more tokens
        temperature: 0.7, // lower = more conservative output, higher = more random output
    },
    translation: {
        key: "translation",
        num_keep: 5,
        num_predict: 1,
        top_k: 40,
        top_p: 0.95,
        temperature: 0.2,
    },
    creation: {
        key: "creation",
        num_keep: 3,
        num_predict: 1,
        top_k: 40,
        top_p: 0.95,
        temperature: 0.2,
    },
    knowledge: {
        key: "knowledge",
        contextLength: 10,
        num_keep: 5,
        num_predict: 1,
        top_k: 40,
        top_p: 0.95,
        temperature: 0.2,
    },
    spoken: {
        key: "spoken",
        contextLength: 10,
        num_keep: 5,
        num_predict: 1,
        top_k: 40,
        top_p: 0.95,
        temperature: 0.2,
    }
})
async function getLabelCate(cateName: string) {
const list = await getLabelList()
@ -223,6 +268,7 @@ export const useModelStore = defineStore('modelStore', () => {
downList,
modelEngines,
llamaQuant,
chatConfig,
getList,
getModelList,
getModelInfo,
@ -251,6 +297,7 @@ export const useModelStore = defineStore('modelStore', () => {
storage: localStorage,
paths: [
"downList",
"chatConfig"
]
}, // name 字段用localstorage存储
],

11
godo/libs/dir.go

@ -116,6 +116,17 @@ func GetStaticDir() string {
}
return staticPath
}
// GetDataDir returns the application's "data" directory, creating it
// (mode 0755) on first use.
//
// If the application directory cannot be resolved it falls back to the
// relative path "data". (The previous fallback returned "static" — a
// copy-paste slip from GetStaticDir.)
func GetDataDir() string {
	homeDir, err := GetAppDir()
	if err != nil {
		return "data"
	}
	dataPath := filepath.Join(homeDir, "data")
	if !PathExists(dataPath) {
		os.MkdirAll(dataPath, 0755)
	}
	return dataPath
}
func GetCacheDir() string {
homeDir, err := GetAppDir()
if err != nil {

28
godo/model/chat.go

@ -0,0 +1,28 @@
package model
import (
"encoding/json"
"godo/libs"
"net/http"
)
// ChatHandler proxies an OpenAI-style chat completion request to the local
// Ollama server's /v1/chat/completions endpoint.
func ChatHandler(w http.ResponseWriter, r *http.Request) {
	target := GetOllamaUrl() + "/v1/chat/completions"
	var payload interface{}
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		libs.ErrorMsg(w, err.Error())
		return
	}
	ForwardHandler(w, r, payload, target, "POST")
}
// EmbeddingHandler proxies an embedding request to the local Ollama
// server's /api/embeddings endpoint.
func EmbeddingHandler(w http.ResponseWriter, r *http.Request) {
	target := GetOllamaUrl() + "/api/embeddings"
	var payload interface{}
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		libs.ErrorMsg(w, err.Error())
		return
	}
	ForwardHandler(w, r, payload, target, "POST")
}

50
godo/model/common.go

@ -0,0 +1,50 @@
package model
import (
"bytes"
"encoding/json"
"godo/libs"
"io"
"log"
"net/http"
)
// ForwardHandler re-issues reqBody as a JSON request of the given method to
// url and streams the upstream response — headers, status code and body —
// back to the client unchanged. The upstream request reuses r's context so
// client cancellation propagates.
func ForwardHandler(w http.ResponseWriter, r *http.Request, reqBody interface{}, url string, method string) {
	payloadBytes, err := json.Marshal(reqBody)
	if err != nil {
		libs.ErrorMsg(w, "Error marshaling payload")
		return
	}
	// Reuse the original request's context (deadlines/cancellation).
	req, err := http.NewRequestWithContext(r.Context(), method, url, bytes.NewBuffer(payloadBytes))
	if err != nil {
		libs.ErrorMsg(w, "Failed to create request")
		return
	}
	req.Header.Set("Content-Type", "application/json")
	// Use the shared default client instead of allocating one per call.
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		libs.ErrorMsg(w, "Failed to send request")
		return
	}
	defer resp.Body.Close()
	// Mirror the upstream response verbatim.
	for k, values := range resp.Header {
		for _, value := range values {
			w.Header().Add(k, value)
		}
	}
	w.WriteHeader(resp.StatusCode)
	if _, err = io.Copy(w, resp.Body); err != nil {
		// The status line and headers are already on the wire, so sending
		// http.Error here would be invalid (it triggers a superfluous
		// WriteHeader). All we can do is log the failure.
		log.Printf("Error forwarding response body: %v", err)
	}
}

46
godo/model/convert.go

@ -0,0 +1,46 @@
package model
import (
"fmt"
"godo/libs"
"net/http"
"os"
)
// ConvertOllama builds an Ollama Modelfile from the downloaded weights
// (FROM + TEMPLATE + optional PARAMETER lines), asks the local Ollama
// server to create the model from it via /api/create, then removes the
// temporary download directory.
func ConvertOllama(w http.ResponseWriter, r *http.Request, req ReqBody) {
	// Guard the indexing and type assertion: missing paths or a missing /
	// non-string "template" previously panicked the handler goroutine.
	if len(req.Paths) == 0 {
		libs.ErrorMsg(w, "model has no downloaded paths")
		return
	}
	template, ok := req.Info["template"].(string)
	if !ok {
		libs.ErrorMsg(w, "model info is missing a string template")
		return
	}
	modelFile := "FROM " + req.Paths[0] + "\n"
	modelFile += `TEMPLATE """` + template + `"""`
	if parameters, ok := req.Info["parameters"].([]interface{}); ok {
		for _, param := range parameters {
			if strParam, ok := param.(string); ok {
				modelFile += "\nPARAMETER " + strParam
			} else {
				// Report (but tolerate) non-string parameters.
				fmt.Fprintf(os.Stderr, "Unexpected parameter type: %T\n", param)
			}
		}
	}
	url := GetOllamaUrl() + "/api/create"
	postParams := map[string]string{
		"name":      req.Model,
		"modelfile": modelFile,
	}
	ForwardHandler(w, r, postParams, url, "POST")
	modelDir, err := GetModelDir(req.Model)
	if err != nil {
		libs.ErrorMsg(w, "GetModelDir")
		return
	}
	// Drop the raw download directory; Ollama now owns the model blobs.
	if err = os.RemoveAll(modelDir); err != nil {
		libs.ErrorMsg(w, "Error removing directory")
		return
	}
}

155
godo/model/data.go

@ -0,0 +1,155 @@
package model
import (
"encoding/json"
"fmt"
"godo/libs"
"os"
"path/filepath"
"sync"
)
var reqBodyMap = sync.Map{}
// GetConfigFile returns the path of model.json inside the app directory,
// creating both the directory and an empty ("[]") config file on first use.
func GetConfigFile() (string, error) {
	appDir, err := libs.GetAppDir()
	if err != nil {
		return "", err
	}
	if !libs.PathExists(appDir) {
		os.MkdirAll(appDir, 0755)
	}
	cfgPath := filepath.Join(appDir, "model.json")
	if !libs.PathExists(cfgPath) {
		// Seed with an empty JSON array so the first LoadConfig succeeds.
		if err := os.WriteFile(cfgPath, []byte("[]"), 0644); err != nil {
			return "", err
		}
	}
	return cfgPath, nil
}
// LoadConfig 从文件加载所有ReqBody到映射中,如果文件不存在则创建一个空文件
func LoadConfig() error {
filePath, err := GetConfigFile()
if err != nil {
return err
}
var reqBodies []ReqBody
content, err := os.ReadFile(filePath)
if err != nil {
return err
}
err = json.Unmarshal(content, &reqBodies)
if err != nil {
return err
}
for _, reqBody := range reqBodies {
reqBodyMap.Store(reqBody.Model, reqBody)
}
//log.Printf("Load config file success %v", reqBodyMap)
return nil
}
// SaveReqBodiesToFile 将映射中的所有ReqBody保存回文件
// SaveConfig serializes every entry of reqBodyMap back to the config file
// as indented JSON.
//
// The slice is pre-allocated non-nil so an empty map writes "[]" rather
// than "null", matching the file's initial contents from GetConfigFile.
func SaveConfig() error {
	filePath, err := GetConfigFile()
	if err != nil {
		return err
	}
	reqBodies := make([]ReqBody, 0)
	reqBodyMap.Range(func(key, value interface{}) bool {
		reqBodies = append(reqBodies, value.(ReqBody))
		return true
	})
	content, err := json.MarshalIndent(reqBodies, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal reqBodies to JSON: %w", err)
	}
	if err := os.WriteFile(filePath, content, 0644); err != nil {
		return fmt.Errorf("failed to write to file: %w", err)
	}
	return nil
}
// GetModel looks up a model entry by name; the second result reports
// whether the entry exists.
func GetModel(model string) (ReqBody, bool) {
	if value, ok := reqBodyMap.Load(model); ok {
		return value.(ReqBody), true
	}
	return ReqBody{}, false
}
// ExistModel reports whether a model entry with the given name is loaded.
func ExistModel(model string) bool {
	_, ok := reqBodyMap.Load(model)
	return ok
}
// SetModel stores (or overwrites) reqBody under its model name and
// persists the whole map to disk.
func SetModel(reqBody ReqBody) error {
	reqBodyMap.Store(reqBody.Model, reqBody)
	if err := SaveConfig(); err != nil {
		return fmt.Errorf("failed to save updated model configuration: %w", err)
	}
	return nil
}
// GetModelByDownloadUrl scans the map for the entry whose Info["md5url"]
// equals downloadUrl; the boolean reports whether a match was found.
func GetModelByDownloadUrl(downloadUrl string) (ReqBody, bool) {
	var match ReqBody
	found := false
	reqBodyMap.Range(func(_, value interface{}) bool {
		entry, ok := value.(ReqBody)
		if !ok || entry.Info["md5url"] != downloadUrl {
			return true // keep scanning
		}
		match = entry
		found = true
		return false // stop on first match
	})
	return match, found
}
// UpdateModel overwrites an existing entry; it fails if the model was
// never registered, then persists the map.
func UpdateModel(reqBody ReqBody) error {
	if _, ok := reqBodyMap.Load(reqBody.Model); !ok {
		return fmt.Errorf("model directory %s not found", reqBody.Model)
	}
	reqBodyMap.Store(reqBody.Model, reqBody)
	if err := SaveConfig(); err != nil {
		return fmt.Errorf("failed to save updated model configuration: %w", err)
	}
	return nil
}
// AddModel registers a new entry under the given name; it fails if the
// name is already taken, then persists the map.
func AddModel(model string, reqBody ReqBody) error {
	if _, taken := reqBodyMap.Load(model); taken {
		return fmt.Errorf("model directory %s already exists", model)
	}
	reqBodyMap.Store(model, reqBody)
	if err := SaveConfig(); err != nil {
		return fmt.Errorf("failed to save new model configuration: %w", err)
	}
	return nil
}
// DeleteModel removes the entry (if present) and persists the map;
// deleting an unknown model is not an error (sync.Map.Delete is a no-op
// for missing keys, matching the original load-then-delete behavior).
func DeleteModel(model string) error {
	reqBodyMap.Delete(model)
	if err := SaveConfig(); err != nil {
		return fmt.Errorf("failed to delete model configuration: %w", err)
	}
	return nil
}

258
godo/model/down.go

@ -0,0 +1,258 @@
package model
import (
"crypto/md5"
"encoding/hex"
"encoding/json"
"godo/libs"
"io"
"log"
"net/http"
"os"
"strings"
"sync"
"time"
"github.com/cavaliergopher/grab/v3"
)
const (
concurrency = 6 // 并发下载数
)
var downloads = make(map[string]*grab.Response)
var downloadsMutex sync.Mutex
//var cancelDownloads = make(map[string]context.CancelFunc)
func noticeSuccess(w http.ResponseWriter) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
json.NewEncoder(w).Encode(map[string]string{"status": "success"})
//log.Println("Download starting!")
}
// Download handles a model-download request: it decodes a ReqBody, answers
// success immediately for already-registered models, delegates "ollama"
// sources to setOllamaInfo, and otherwise fetches every URL in reqBody.Url
// with grab, streaming JSON progress lines to the client before registering
// the model via SetModel. LLM-type downloads are additionally imported into
// Ollama and their raw paths dropped.
func Download(w http.ResponseWriter, r *http.Request) {
	reqBody := ReqBody{
		Info: make(map[string]interface{}),
	}
	err := json.NewDecoder(r.Body).Decode(&reqBody)
	if err != nil {
		libs.ErrorMsg(w, "first Decode request body error")
		return
	}
	err = LoadConfig()
	if err != nil {
		libs.ErrorMsg(w, "Load config error")
		return
	}
	// An already-registered model counts as a completed download.
	_, exitsModel := GetModel(reqBody.Model)
	if exitsModel {
		noticeSuccess(w)
		return
	}
	// Ollama-hosted models take a separate pull path.
	if reqBody.From == "ollama" {
		setOllamaInfo(w, r, reqBody)
		return
	}
	var paths []string
	var tsize int64
	for _, urls := range reqBody.Url {
		urls = replaceUrl(urls)
		// Non-HTTP(S) entries are treated as local file paths: just stat them.
		if !strings.HasPrefix(strings.ToLower(urls), "http://") && !strings.HasPrefix(strings.ToLower(urls), "https://") {
			fileInfo, err := os.Stat(urls)
			if err != nil {
				libs.ErrorMsg(w, "Get model path error")
				return
			}
			tsize += fileInfo.Size()
			paths = append(paths, urls)
			continue
		}
		filePath, err := GetModelPath(urls, reqBody.Model, reqBody.Type)
		//log.Printf("filePath is %s", filePath)
		if err != nil {
			libs.ErrorMsg(w, "Get model path error")
			return
		}
		paths = append(paths, filePath)
		md5url := md5Url(urls)
		// NOTE(review): the downloads map is read and written here without
		// holding downloadsMutex, while trackProgress/delUrls lock it —
		// looks like a data race; confirm and lock these accesses.
		if rsp, ok := downloads[md5url]; ok {
			// URL already in flight: attach progress tracking only.
			// NOTE(review): this returns from the whole handler, abandoning
			// any remaining URLs in reqBody.Url — confirm that is intended.
			go trackProgress(w, rsp, reqBody, md5url)
			return
		}
		// Create a fresh downloader instance for this URL.
		client := grab.NewClient()
		client.HTTPClient = &http.Client{
			Transport: &http.Transport{
				MaxIdleConnsPerHost: concurrency, // optional cap on concurrent connections per host
			},
		}
		log.Printf("filePath is %s", filePath)
		// Build the download request (destination path + source URL).
		req, err := grab.NewRequest(filePath, urls)
		if err != nil {
			libs.ErrorMsg(w, "Invalid download URL")
			return
		}
		resp := client.Do(req)
		downloads[md5url] = resp
		//log.Printf("Download urls: %v\n", reqBody.DownloadUrl)
		// Stream progress to the client while the transfer runs.
		go trackProgress(w, resp, reqBody, md5url)
		tsize += resp.Size()
		// Wait for completion and check for errors (resp.Err blocks until
		// the transfer finishes or fails).
		if err := resp.Err(); err != nil {
			libs.ErrorMsg(w, "Download failed")
			return
		}
	}
	delUrls(reqBody.Url)
	if tsize <= 0 {
		libs.ErrorMsg(w, "Download size is zero")
		return
	}
	reqBody.Paths = paths
	reqBody.Status = "success"
	reqBody.CreatedAt = time.Now()
	reqBody.Info["tsize"] = tsize
	reqBody.Info["size"] = humanReadableSize(tsize)
	// LLM weights are imported into Ollama; after conversion the raw paths
	// are dropped and the entry is re-tagged as Ollama-managed.
	if reqBody.Type == "llm" {
		ConvertOllama(w, r, reqBody)
		reqBody.From = "ollama"
		reqBody.Paths = []string{}
	}
	if err := SetModel(reqBody); err != nil {
		libs.ErrorMsg(w, "Set model error")
		return
	}
	noticeSuccess(w)
}
// trackProgress streams download progress to the client as newline-
// delimited JSON (FileProgress) every 100ms until the transfer completes,
// then removes the download from the in-flight map (under downloadsMutex).
// A recover() guard keeps a broken client connection from killing the
// process.
//
// NOTE(review): this runs as a goroutine spawned from Download; in the
// "already downloading" path the handler returns immediately, after which
// writing to w is not valid — confirm the handler keeps the connection open.
func trackProgress(w http.ResponseWriter, resp *grab.Response, reqBody ReqBody, md5url string) {
	defer func() {
		if r := recover(); r != nil {
			log.Printf("Recovered panic in trackProgress: %v", r)
		}
		downloadsMutex.Lock()
		defer downloadsMutex.Unlock()
		delete(downloads, md5url)
	}()
	ticker := time.NewTicker(100 * time.Millisecond)
	defer ticker.Stop()
	// Progress streaming needs a flushable ResponseWriter.
	flusher, ok := w.(http.Flusher)
	if !ok {
		log.Printf("Streaming unsupported")
		http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
		return
	}
	for {
		select {
		case <-ticker.C:
			fp := FileProgress{
				Progress:   resp.Progress(),
				IsFinished: resp.IsComplete(),
				Total:      resp.Size(),
				Current:    resp.BytesComplete(),
				Status:     "loading",
			}
			//log.Printf("Progress: %v", fp)
			// Only report "loaded" when the byte counts agree.
			// NOTE(review): grab marks IsComplete on failure too; a failed
			// transfer can loop here forever since Current never reaches
			// Total — confirm and add an error exit.
			if resp.IsComplete() && fp.Current == fp.Total {
				fp.Status = "loaded"
			}
			jsonBytes, err := json.Marshal(fp)
			if err != nil {
				log.Printf("Error marshaling FileProgress to JSON: %v", err)
				continue
			}
			if w != nil {
				io.WriteString(w, string(jsonBytes))
				w.Write([]byte("\n"))
				flusher.Flush()
			} else {
				log.Println("ResponseWriter is nil, cannot send progress")
			}
			if fp.Status == "loaded" {
				return
			}
		}
	}
}
// md5Url returns the lowercase hex MD5 digest of url, used as the key
// identifying an in-flight download.
func md5Url(url string) string {
	sum := md5.Sum([]byte(url))
	return hex.EncodeToString(sum[:])
}
// delUrls drops the in-flight download records for every given URL, after
// applying the same blob→resolve rewrite used when the download started.
func delUrls(reqUrl []string) {
	if len(reqUrl) == 0 {
		return
	}
	downloadsMutex.Lock()
	defer downloadsMutex.Unlock()
	for _, u := range reqUrl {
		delete(downloads, md5Url(replaceUrl(u)))
	}
}
// DeleteFileHandle removes a model: it drops the entry from the config
// map, then either forwards the deletion to the Ollama server (engine
// "ollama") or clears in-flight download records and recursively deletes
// the model's download directory, finally answering {"code":0}.
func DeleteFileHandle(w http.ResponseWriter, r *http.Request) {
	var reqBody ReqBody
	err := json.NewDecoder(r.Body).Decode(&reqBody)
	if err != nil {
		libs.ErrorMsg(w, "Decode request body error: ")
		return
	}
	err = LoadConfig()
	if err != nil {
		libs.ErrorMsg(w, "Load config error: ")
		return
	}
	if err := DeleteModel(reqBody.Model); err != nil {
		libs.ErrorMsg(w, "Error deleting model")
		return
	}
	// Ollama-managed models are deleted by the Ollama server itself; its
	// response is forwarded verbatim.
	if reqBody.Engine == "ollama" {
		postQuery := map[string]interface{}{"name": reqBody.Model}
		url := GetOllamaUrl() + "/api/delete"
		ForwardHandler(w, r, postQuery, url, "DELETE")
		return
	}
	delUrls(reqBody.Url)
	// Remove the model's download directory — this recursively deletes all
	// of its contents.
	//dirPath := filepath.Dir(filePath)
	dirPath, err := GetModelDir(reqBody.Model)
	if err != nil {
		libs.ErrorMsg(w, "GetModelDir error")
		return
	}
	//log.Printf("delete dirpath %v", dirPath)
	err = os.RemoveAll(dirPath)
	if err != nil && !os.IsNotExist(err) {
		libs.ErrorMsg(w, "Error removing directory")
		return
	} else if err == nil {
		log.Printf("Deleted directory: %s", dirPath)
	} else {
		// A missing directory is acceptable — nothing to delete.
		log.Printf("Directory does not exist: %s", dirPath)
	}
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusOK)
	json.NewEncoder(w).Encode(map[string]int{"code": 0})
}
// replaceUrl rewrites Hugging Face web links ("/blob/main/") to direct
// download links ("/resolve/main/"); other URLs pass through unchanged.
func replaceUrl(url string) string {
	return strings.Replace(url, "/blob/main/", "/resolve/main/", -1)
}

118
godo/model/libs.go

@ -0,0 +1,118 @@
package model
import (
"encoding/json"
"fmt"
"godo/libs"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"strings"
)
// GetDownDir resolves (and creates if needed, mode 0755) the directory for
// modelPath below the Hugging Face model root.
func GetDownDir(modelPath string) (string, error) {
	baseDir, err := GetHfModelDir()
	if err != nil {
		return "", err
	}
	dir := filepath.Join(baseDir, modelPath)
	if !libs.PathExists(dir) {
		os.MkdirAll(dir, 0755)
	}
	return dir, nil
}
func GetModelDir(model string) (string, error) {
modelName := ReplaceModelName(model)
modelDir, err := GetDownDir(modelName)
if err != nil {
return "", err
}
return modelDir, nil
}
// GetModelPath computes the local file path a download URL should be saved
// to for the given model. For reqType "local" the path is redirected into
// the Ollama store: sha256 blob names go under blobs/, everything else
// under the manifests/ tree derived from the model name.
//
// The previous version split the raw URL on "/" first — strings.Split
// never returns an empty slice, so the url.Parse fallback was dead code,
// and query strings leaked into the file name. Parse the URL first and
// derive the name from its path component instead.
func GetModelPath(urls string, model string, reqType string) (string, error) {
	modelDir, err := GetModelDir(model)
	if err != nil {
		return "", err
	}
	var fileName string
	if parsedUrl, perr := url.Parse(urls); perr == nil && parsedUrl.Path != "" {
		fileName = filepath.Base(parsedUrl.Path)
	} else {
		// Fallback for unparseable inputs: last path segment of the raw string.
		parts := strings.Split(urls, "/")
		fileName = parts[len(parts)-1]
	}
	filePath := filepath.Join(modelDir, fileName)
	if reqType == "local" {
		dir, err := getOModelsDir()
		if err != nil {
			return "", err
		}
		// Ollama blob files are named "sha256-<64 hex chars>" (71 chars total).
		if strings.Contains(fileName, "sha256-") && len(fileName) == 71 {
			filePath = filepath.Join(dir, "blobs", fileName)
			log.Printf("====filePath1: %s", filePath)
		} else {
			opName := getOpName(model)
			filePath = filepath.Join(dir, "manifests", opName.Space, opName.LibPath, opName.Name, opName.Tag)
			log.Printf("====filePath2: %s", filePath)
		}
	}
	return filePath, nil
}
// GetHfModelDir returns the root directory where Hugging Face models are
// stored, under the application data dir. The error is currently always nil;
// the signature matches the other path helpers.
func GetHfModelDir() (string, error) {
	return filepath.Join(libs.GetDataDir(), "hfmodels"), nil
}
// GetOllamaModelDir returns the directory where Ollama models are stored,
// under the application data dir.
func GetOllamaModelDir() string {
	return filepath.Join(libs.GetDataDir(), "models")
}
// getOModelsDir adapts GetOllamaModelDir to the (string, error) shape used by
// the other path helpers; the error is always nil.
func getOModelsDir() (string, error) {
	dir := GetOllamaModelDir()
	return dir, nil
}
// GetOllamaUrl returns the base URL of the local Ollama daemon.
// NOTE(review): the default port is hard-coded and not configurable here.
func GetOllamaUrl() string {
	const ollamaBase = "http://localhost:11434"
	return ollamaBase
}
// modelNameSanitizer matches the characters stripped from model names when
// they are used as a single path component: '/', ':' and whitespace.
var modelNameSanitizer = regexp.MustCompile(`[/\s:]`)

// ReplaceModelName sanitizes a model name (e.g. "lib/name:tag") into a flat
// string safe to use as a directory name.
//
// Fix: the regexp was recompiled on every call; it is now compiled once at
// package init, which is the idiomatic (and faster) form.
func ReplaceModelName(modelName string) string {
	return modelNameSanitizer.ReplaceAllString(modelName, "")
}
// ModelConfigFromRequest extracts the "options" object from the request's
// JSON body and unmarshals it into a ModelConfig. If the body cannot be
// decoded, the "options" key is absent or not an object, or the options
// cannot be re-marshalled into ModelConfig, a zero-value ModelConfig is
// returned.
//
// Note: this consumes r.Body; callers cannot re-read the body afterwards.
//
// Fixes: the old comment referred to a 'modelconfig' key while the code reads
// "options"; the json.Marshal error was silently discarded.
func ModelConfigFromRequest(r *http.Request) ModelConfig {
	var config ModelConfig
	var requestData map[string]interface{}
	if err := json.NewDecoder(r.Body).Decode(&requestData); err != nil {
		return ModelConfig{}
	}
	optionsData, ok := requestData["options"].(map[string]interface{})
	if !ok {
		return ModelConfig{}
	}
	jsonData, err := json.Marshal(optionsData)
	if err != nil {
		return ModelConfig{}
	}
	if err := json.Unmarshal(jsonData, &config); err != nil {
		return ModelConfig{}
	}
	return config
}
// getIntInfo coerces a decoded-JSON numeric value to int64. JSON numbers
// arrive as float64 via encoding/json (fractions are truncated); native int
// and int64 values are also accepted for callers that pass integers directly.
// Any other type (or nil) yields 0.
//
// Improvement: the original only handled float64 (and shadowed its own
// parameter); int/int64 inputs silently became 0.
func getIntInfo(val interface{}) int64 {
	switch v := val.(type) {
	case float64:
		return int64(v)
	case int64:
		return v
	case int:
		return int64(v)
	default:
		return 0
	}
}

396
godo/model/op.go

@ -0,0 +1,396 @@
package model
import (
"bytes"
"crypto/sha256"
"encoding/json"
"errors"
"fmt"
"godo/libs"
"io"
"log"
"net/http"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
)
// OllamaModelsList mirrors the JSON payload of Ollama's GET /api/tags.
type OllamaModelsList struct {
	Models []OllamaModelsInfo `json:"models"`
}

// OllamaDetails carries the per-model detail fields returned by /api/tags.
type OllamaDetails struct {
	ParameterSize     string `json:"parameter_size"`     // e.g. "7B"
	QuantizationLevel string `json:"quantization_level"` // e.g. "Q4_0"
}

// OllamaModelsInfo is a single entry in the /api/tags model list.
type OllamaModelsInfo struct {
	Model   string        `json:"model"` // model tag, e.g. "llama3:latest"
	Details OllamaDetails `json:"details"`
	Size    int64         `json:"size"` // on-disk size in bytes
}

// OllamaModelDetail mirrors the response of Ollama's POST /api/show.
// Details and ModelInfo are kept as loose maps because their keys vary per
// model architecture (e.g. "<arch>.context_length").
type OllamaModelDetail struct {
	Parameters string                 `json:"parameters"`
	Template   string                 `json:"template"`
	Details    map[string]interface{} `json:"details"`
	ModelInfo  map[string]interface{} `json:"model_info"`
}

// ResModelInfo is the flattened, display-oriented model summary assembled by
// parseOllamaInfo (sizes as human-readable strings, hardware hints).
type ResModelInfo struct {
	Parameters      string `json:"parameters"`
	Template        string `json:"template"`
	ContextLength   int64  `json:"context_length"`
	EmbeddingLength int64  `json:"embedding_length"`
	Size            string `json:"size"`  // human-readable download size
	Quant           string `json:"quant"` // quantization level
	Desk            string `json:"desk"`  // disk footprint (currently same as Size)
	Cpu             string `json:"cpu"`   // suggested minimum system RAM
	Gpu             string `json:"gpu"`   // suggested minimum GPU VRAM
}

// Layer is one entry of an OCI-style Ollama manifest.
type Layer struct {
	MediaType string `json:"mediaType"`
	Digest    string `json:"digest"` // "sha256:<hex>"
	Size      int64  `json:"size"`
	From      string `json:"from,omitempty"`
	// status is unexported and never serialized; it is not used in this file.
	status string
}

// ManifestV2 is the manifest schema Ollama stores under models/manifests/.
type ManifestV2 struct {
	SchemaVersion int      `json:"schemaVersion"`
	MediaType     string   `json:"mediaType"`
	Config        *Layer   `json:"config"`
	Layers        []*Layer `json:"layers"`
}

// OmodelPath is the decomposed registry path of a model tag, matching the
// directory layout models/manifests/<Space>/<LibPath>/<Name>/<Tag>.
type OmodelPath struct {
	Space   string // registry host, e.g. "registry.ollama.ai"
	LibPath string // namespace, defaults to "library"
	Name    string // model name
	Tag     string // version tag, defaults to "latest"
}
// Binary size units in bytes.
//
// Fix: the original wrote `KB = 1 << (10 * iota)`, but iota is 0 on the first
// line, making KB == 1, MB == 1024 and GB == 1048576 — each one unit tier too
// small. Shifting by 10*(iota+1) yields the intended values.
const (
	KB = 1 << (10 * (iota + 1)) // 1024
	MB                          // 1_048_576
	GB                          // 1_073_741_824
)
// Coarse hardware-requirement labels used in the model summaries produced by
// parseOllamaInfo (system RAM for CPU inference, VRAM for GPU inference).
const (
	CPU_8GB  = "8GB"
	CPU_16GB = "16GB"
	CPU_32GB = "32GB"
	GPU_6GB  = "6GB"
	GPU_8GB  = "8GB"
	GPU_12GB = "12GB"
)
// humanReadableSize formats a byte count using decimal (1000-based) unit
// steps, truncated to an integer: 500 -> "500B", 1500 -> "1KB",
// 2_000_000 -> "2MB". Values of one terabyte and above stay expressed in GB.
//
// Fix: removed the dead switch — its cases 0..3 covered every reachable unit
// index, so the default branch was unreachable, and its comments promised
// decimal points that the %d verb never printed. Output is unchanged.
func humanReadableSize(size int64) string {
	units := []string{"B", "KB", "MB", "GB"}
	idx := 0
	for size >= 1000 && idx < len(units)-1 {
		size /= 1000 // integer division: fractional part is truncated
		idx++
	}
	return fmt.Sprintf("%d%s", size, units[idx])
}
// Tagshandler responds with every stored model record as a JSON array,
// sorted newest-first by CreatedAt.
func Tagshandler(w http.ResponseWriter, r *http.Request) {
	if err := LoadConfig(); err != nil {
		libs.ErrorMsg(w, "Load config error")
		return
	}
	// Snapshot the concurrent map into a slice.
	var entries []ReqBody
	reqBodyMap.Range(func(_, value interface{}) bool {
		if rb, ok := value.(ReqBody); ok {
			entries = append(entries, rb)
		}
		return true // keep iterating
	})
	// Newest first.
	sort.Slice(entries, func(i, j int) bool {
		return entries[i].CreatedAt.After(entries[j].CreatedAt)
	})
	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(entries); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
}
// ShowHandler responds with the stored record for a single model, selected by
// the "model" query parameter. If no record matches, a zero-value ReqBody is
// encoded (same behavior as before).
func ShowHandler(w http.ResponseWriter, r *http.Request) {
	if err := LoadConfig(); err != nil {
		libs.ErrorMsg(w, "Load config error")
		return
	}
	modelName := r.URL.Query().Get("model")
	if modelName == "" {
		libs.ErrorMsg(w, "Model name is empty")
		return
	}
	var found ReqBody
	reqBodyMap.Range(func(_, value interface{}) bool {
		if rb, ok := value.(ReqBody); ok && rb.Model == modelName {
			found = rb
			return false // stop at first match
		}
		return true
	})
	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(found); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
}
// extractParameterSize resolves a model's parameter count in billions. It
// first tries the reported size string (e.g. "7B"), then falls back to the
// tag part of the model name (e.g. "llama:7b"). The boolean reports whether a
// value could be extracted.
func extractParameterSize(sizeStr string, model string) (float64, bool) {
	if n, err := strconv.ParseFloat(strings.TrimSuffix(sizeStr, "B"), 64); err == nil {
		return n, true
	}
	parts := strings.Split(model, ":")
	if len(parts) > 1 {
		// Strip a trailing "b"/"B" from the tag and try again.
		tag := strings.TrimSuffix(strings.ToLower(parts[1]), "b")
		if n, err := strconv.ParseFloat(tag, 64); err == nil {
			return n, true
		}
	}
	return 0, false
}
// parseOllamaInfo derives display size, quantization level and rough CPU RAM /
// GPU VRAM requirements for a model reported by the Ollama daemon. Unknown
// parameter counts fall into the smallest hardware tier.
func parseOllamaInfo(info OllamaModelsInfo) ResModelInfo {
	out := ResModelInfo{
		Size:  humanReadableSize(info.Size),
		Quant: info.Details.QuantizationLevel,
	}
	out.Desk = out.Size
	paramSize, ok := extractParameterSize(info.Details.ParameterSize, info.Model)
	if !ok {
		// Same outcome as the original early-return: smallest tier.
		out.Cpu, out.Gpu = CPU_8GB, GPU_6GB
		return out
	}
	switch {
	case paramSize < 3:
		out.Cpu, out.Gpu = CPU_8GB, GPU_6GB
	case paramSize < 9:
		out.Cpu, out.Gpu = CPU_16GB, GPU_8GB
	default:
		out.Cpu, out.Gpu = CPU_32GB, GPU_12GB
	}
	return out
}
// getOllamaModels fetches the list of locally installed models from the
// Ollama daemon's GET /api/tags endpoint.
//
// Fixes: the http.Get result was misnamed `req` and had Header.Set called on
// the *response* (a no-op); error messages were misleading ("failed to create
// request" for a failed GET) and dropped the underlying cause — they now wrap
// it with %w.
func getOllamaModels() ([]OllamaModelsInfo, error) {
	res := []OllamaModelsInfo{}
	resp, err := http.Get(GetOllamaUrl() + "/api/tags")
	if err != nil {
		return res, fmt.Errorf("failed to request ollama tags: %w", err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return res, fmt.Errorf("failed to read response body: %w", err)
	}
	var list OllamaModelsList
	if err := json.Unmarshal(body, &list); err != nil {
		return res, fmt.Errorf("failed to unmarshal response body: %w", err)
	}
	return list.Models, nil
}
// setOllamaInfo pulls the model through the local Ollama daemon, then looks
// it up in the daemon's tag list and persists an enriched ReqBody (size,
// quantization, hardware hints, template/parameters, context/embedding
// lengths and local manifest paths) via SetModel.
//
// NOTE(review): ForwardHandler streams the /api/pull response into w first;
// the later libs.ErrorMsg / SetModel error paths write to w again after the
// response has already started — confirm ForwardHandler's contract tolerates
// this double write.
func setOllamaInfo(w http.ResponseWriter, r *http.Request, reqBody ReqBody) {
	model := reqBody.Model
	postQuery := map[string]interface{}{
		"model": model,
	}
	url := GetOllamaUrl() + "/api/pull"
	ForwardHandler(w, r, postQuery, url, "POST")
	details, err := getOllamaInfo(r, model)
	//log.Printf("details is %v", details)
	if err != nil {
		libs.ErrorMsg(w, "get ollama info error: ")
		return
	}
	modelList, err := getOllamaModels()
	if err != nil {
		libs.ErrorMsg(w, "Load ollama error: ")
		return
	}
	if len(modelList) < 1 {
		libs.ErrorMsg(w, "Load ollama error: ")
		return
	}
	// Find the freshly pulled model in the daemon's list (shadows `model`).
	for _, model := range modelList {
		if model.Model == reqBody.Model {
			oinfo := parseOllamaInfo(model)
			// NOTE(review): this assertion panics if "general.architecture"
			// is missing or not a string in the /api/show payload — confirm.
			architecture := details.ModelInfo["general.architecture"].(string)
			contextLength := details.ModelInfo[architecture+".context_length"]
			embeddingLength := details.ModelInfo[architecture+".embedding_length"]
			info := map[string]interface{}{
				"size":  oinfo.Size,
				"quant": oinfo.Quant,
				"Desk":  oinfo.Desk, // key casing differs from the others; kept as-is
				"cpu":   oinfo.Cpu,
				"gpu":   oinfo.Gpu,
				"pb":    model.Details.ParameterSize,
				"template":         details.Template,
				"parameters":       details.Parameters,
				"context_length":   contextLength,
				"embedding_length": embeddingLength,
			}
			paths, err := getManifests(model.Model)
			if err != nil {
				// Manifest resolution failure: skip this entry and keep looking.
				log.Printf("Error parsing Manifests: %v", err)
				continue
			}
			reqBody.Info = info
			reqBody.Paths = paths
			reqBody.Status = "success"
			reqBody.CreatedAt = time.Now()
			if err := SetModel(reqBody); err != nil {
				libs.ErrorMsg(w, "Set model error")
				return
			}
			return
		}
	}
}
// getOllamaInfo queries the Ollama daemon's POST /api/show endpoint for the
// details of a single model, reusing the caller's request context so the
// upstream call is cancelled together with the incoming request.
func getOllamaInfo(r *http.Request, model string) (OllamaModelDetail, error) {
	detail := OllamaModelDetail{}
	payload, err := json.Marshal(map[string]interface{}{"name": model})
	if err != nil {
		return detail, fmt.Errorf("json payload error: %w", err)
	}
	endpoint := GetOllamaUrl() + "/api/show"
	req, err := http.NewRequestWithContext(r.Context(), "POST", endpoint, bytes.NewBuffer(payload))
	if err != nil {
		return detail, fmt.Errorf("couldn't create req context: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return detail, err
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return detail, fmt.Errorf("couldn't read response body: %w", err)
	}
	if err := json.Unmarshal(raw, &detail); err != nil {
		return detail, fmt.Errorf("failed to unmarshal response body")
	}
	return detail, nil
}
// getOpName splits a model tag like "[lib/]name[:tag]" into the registry path
// components used by Ollama's manifests directory layout. Missing parts
// default to namespace "library" and tag "latest".
func getOpName(model string) OmodelPath {
	name, tag := model, "latest"
	if strings.Contains(name, ":") {
		// Mirrors the original: only the second ":"-segment becomes the tag.
		segs := strings.Split(model, ":")
		name, tag = segs[0], segs[1]
	}
	lib := "library"
	if strings.Contains(name, "/") {
		segs := strings.Split(name, "/")
		lib, name = segs[0], segs[1]
	}
	return OmodelPath{
		Space:   "registry.ollama.ai",
		LibPath: lib,
		Name:    name,
		Tag:     tag,
	}
}
// getManifests resolves every local file that backs a model: the manifest
// file itself, its config blob and all layer blobs, in that order.
//
// Fixes: the PathExists branch wrapped a nil error with %w (producing
// "...: %!w(<nil>)"); a manifest with a nil Config pointer would have panicked
// on the Digest dereference.
func getManifests(model string) ([]string, error) {
	res := []string{}
	opName := getOpName(model)
	modelsDir, err := getOModelsDir()
	if err != nil {
		return res, fmt.Errorf("failed to get user home directory: %w", err)
	}
	manifestsFile := filepath.Join(modelsDir, "manifests", opName.Space, opName.LibPath, opName.Name, opName.Tag)
	if !libs.PathExists(manifestsFile) {
		return res, fmt.Errorf("manifests file does not exist: %s", manifestsFile)
	}
	res = append(res, manifestsFile)
	f, err := os.Open(manifestsFile)
	if err != nil {
		return res, err
	}
	defer f.Close()
	var manifest ManifestV2
	// The digest is computed while decoding but currently unused; kept so a
	// future integrity check can verify it without re-reading the file.
	sha256sum := sha256.New()
	if err := json.NewDecoder(io.TeeReader(f, sha256sum)).Decode(&manifest); err != nil {
		return res, err
	}
	if manifest.Config == nil {
		return res, errors.New("manifest has no config layer")
	}
	configBlob, err := GetBlobsPath(manifest.Config.Digest)
	if err != nil {
		return nil, err
	}
	res = append(res, configBlob)
	for _, layer := range manifest.Layers {
		layerBlob, err := GetBlobsPath(layer.Digest)
		if err != nil {
			return nil, err
		}
		res = append(res, layerBlob)
	}
	return res, nil
}
// GetBlobsPath maps a sha256 digest ("sha256:<hex>" or "sha256-<hex>") to its
// file path under the Ollama blobs directory. An empty digest yields the
// blobs directory itself; anything else must be a well-formed digest.
func GetBlobsPath(digest string) (string, error) {
	dir, err := getOModelsDir()
	if err != nil {
		return "", err
	}
	// Only accept actual sha256 digests.
	digestRe := regexp.MustCompile(`^sha256[:-][0-9a-fA-F]{64}$`)
	if digest != "" && !digestRe.MatchString(digest) {
		return "", errors.New("invalid digest format")
	}
	// On disk the ":" separator is stored as "-".
	normalized := strings.ReplaceAll(digest, ":", "-")
	return filepath.Join(dir, "blobs", normalized), nil
}

55
godo/model/server.go

@ -0,0 +1,55 @@
package model
import (
"fmt"
"godo/libs"
"io"
"log"
"net/http"
"os"
"path/filepath"
)
// DownserverStucct is the request body shape for the file-download endpoint.
// NOTE(review): the name contains a typo ("Stucct" for "Struct") and the type
// is not referenced in this file; renaming would break external callers, so
// it is kept as-is.
type DownserverStucct struct {
	Path string `json:"path"`
}
// DownServerHandler streams the file named by the "path" query parameter to
// the client as an attachment download.
//
// SECURITY NOTE(review): the path comes straight from the query string with
// no sanitization or base-directory restriction, so any file readable by the
// server process can be downloaded (path traversal / arbitrary file read).
// Confirm this endpoint is only reachable by trusted local clients, or
// restrict the path to a known root before serving.
func DownServerHandler(w http.ResponseWriter, r *http.Request) {
	filePath := r.URL.Query().Get("path")
	//log.Printf("imagePath: %s", imagePath)
	// Reject an empty path outright.
	if filePath == "" {
		http.Error(w, "Invalid file path", http.StatusBadRequest)
		return
	}
	if !libs.PathExists(filePath) {
		http.Error(w, "File not found", http.StatusNotFound)
		return
	}
	// Stat first so Content-Length can be set before streaming.
	fileInfo, err := os.Stat(filePath)
	if err != nil {
		log.Printf("Error getting file info: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}
	// Force a browser download rather than inline display.
	w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, filepath.Base(filePath)))
	w.Header().Set("Content-Type", "application/octet-stream")
	w.Header().Set("Content-Length", fmt.Sprintf("%d", fileInfo.Size()))
	// Stream the file body into the response.
	file, err := os.Open(filePath)
	if err != nil {
		log.Printf("Error opening file: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}
	defer file.Close()
	_, err = io.Copy(w, file)
	if err != nil {
		// Headers are already sent; we can only log at this point.
		log.Printf("Error copying file to response: %v", err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}
}

91
godo/model/types.go

@ -0,0 +1,91 @@
package model
import "time"
// Parameter glossary (mirrors the inference server's model options):
//   llama_model_path      string  file path to the LLaMA model
//   ngl                   int     number of GPU layers to use
//   ctx_len               int     context length for model operations
//   embedding             bool    whether to use embeddings in the model
//   n_parallel            int     number of parallel operations
//   cont_batching         bool    whether to use continuous batching
//   user_prompt           string  prompt used for the user role
//   ai_prompt             string  prompt used for the AI assistant role
//   system_prompt         string  prompt used for system rules
//   pre_prompt            string  prompt used for internal configuration
//   cpu_threads           int     threads used for inference (CPU mode only)
//   n_batch               int     batch size for the prompt-eval step
//   caching_enabled       bool    whether prompt caching is enabled
//   clean_cache_threshold int     number of chats that triggers a cache clean
//   grp_attn_n            int     group-attention factor in self-extend
//   grp_attn_w            int     group-attention width in self-extend
//   mlock                 bool    prevent macOS from swapping the model to disk
//   grammar_file          string  path to a GBNF grammar file constraining sampling
//   model_type            string  model type: "llm" or "embedding" (default "llm")

// ModelConfig is the subset of those options currently accepted under the
// request body's "options" key (see ModelConfigFromRequest). Commented-out
// fields are part of the glossary above but not yet wired through.
type ModelConfig struct {
	ModelAlias     string `json:"model_alias"`
	PromptTemplate string `json:"prompt_template"`
	LlamaModelPath string `json:"llama_model_path"` // The file path to the LLaMA model.
	Mmproj         string `json:"mmproj"`
	ModelType      string `json:"model_type"`  // Model type we want to use: llm or embedding, default value is llm
	CPUThreads     int    `json:"cpu_threads"` // The number of threads to use for inferencing (CPU MODE ONLY)
	NGL            int    `json:"ngl"`         // The number of GPU layers to use.
	CtxLen         int    `json:"ctx_len"`     // The context length for the model operations.
	Embedding      bool   `json:"embedding"`   // Whether to use embedding in the model.
	UserPrompt     string `json:"user_prompt"` // The prompt to use for the user.
	AIPrompt       string `json:"ai_prompt"`   // The prompt to use for the AI assistant.
	SystemPrompt   string `json:"system_prompt"` // The prompt to use for system rules.
	// PrePrompt string `json:"pre_prompt"` // The prompt to use for internal configuration.
	// NParallel int `json:"n_parallel"` // The number of parallel operations.
	// ContBatching bool `json:"cont_batching"` // Whether to use continuous batching.
	// NBatch int `json:"n_batch"` // The batch size for prompt eval step
	// CachingEnabled bool `json:"caching_enabled"` // To enable prompt caching or not
	// CleanCacheThreshold int `json:"clean_cache_threshold"` // Number of chats that will trigger clean cache action
	GrpAttnN int `json:"grp_attn_n"` // Group attention factor in self-extend
	GrpAttnW int `json:"grp_attn_w"` // Group attention width in self-extend
	// Mlock bool `json:"mlock"` // Prevent system swapping of the model to disk in macOS
	GrammarFile string `json:"grammar_file"` // You can constrain the sampling using GBNF grammars by providing path to a grammar file
}
// FileProgress reports download progress for a single file.
type FileProgress struct {
	Progress   float64 `json:"progress"` // progress as a percentage, not a byte count
	IsFinished bool    `json:"is_finished"`
	Total      int64   `json:"total"`     // total bytes expected
	Current    int64   `json:"completed"` // bytes downloaded so far
	Status     string  `json:"status"`
}

// ModelStruct is a minimal request body carrying only a model name.
type ModelStruct struct {
	Model string `json:"model"`
}

// ReqBody is the persisted per-model record stored in reqBodyMap and returned
// by the tags/show handlers. Commented-out fields are retained from an
// earlier schema.
type ReqBody struct {
	//DownloadUrl string `json:"url"`
	//Options ModelConfig `json:"options"`
	Model     string                 `json:"model"`
	Url       []string               `json:"url"`
	Engine    string                 `json:"engine"`
	Type      string                 `json:"type"`
	From      string                 `json:"from"`
	Action    []string               `json:"action"`
	Label     string                 `json:"label"`
	Info      map[string]interface{} `json:"info"`   // enriched details (size, quant, hardware hints, ...)
	Status    string                 `json:"status"` // e.g. "success"
	Paths     []string               `json:"paths"`  // local files backing the model
	Params    map[string]interface{} `json:"params"`
	FileName  string                 `json:"file_name"`
	CreatedAt time.Time              `json:"created_at"` // used for newest-first sorting
}

// DownloadsRequest is the request body for batch downloads.
type DownloadsRequest struct {
	Urls []string `json:"urls"`
	Dir  string   `json:"model_path"`
}

// type DelBody struct {
// 	DownloadUrl string `json:"url"`
// 	ModelDir string `json:"name"`
// }

// ErrorResponse is the generic JSON error envelope returned by handlers.
type ErrorResponse struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}
Loading…
Cancel
Save