Mirror of https://github.com/songquanpeng/one-api.git, synced 2025-10-28 20:33:42 +08:00
Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c5aa59e297 | |
| | 211a862d54 | |
| | c4c89e8e1b | |
| | 72983ac734 | |
| | 4d43dce64b | |
| | 0fa94d3c94 | |
@@ -3,6 +3,7 @@ package image
import (
"bytes"
"encoding/base64"
"errors"
"image"
_ "image/gif"
_ "image/jpeg"
@@ -44,8 +45,26 @@ func GetImageSizeFromUrl(url string) (width int, height int, err error) {
}

func GetImageFromUrl(url string) (mimeType string, data string, err error) {

if strings.HasPrefix(url, "data:image/") {
dataURLPattern := regexp.MustCompile(`data:image/([^;]+);base64,(.*)`)

matches := dataURLPattern.FindStringSubmatch(url)
if len(matches) == 3 && matches[2] != "" {
mimeType = "image/" + matches[1]
data = matches[2]
return
}

err = errors.New("image base64 decode failed")
return
}

isImage, err := IsImageUrl(url)
if !isImage {
if err == nil {
err = errors.New("invalid image link")
}
return
}
resp, err := http.Get(url)
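For context, a brief usage sketch (an editor's illustration, not part of the diff) of the two input forms GetImageFromUrl now handles, assuming the signature shown above; the URLs and payloads are hypothetical:

mimeType, data, err := image.GetImageFromUrl("data:image/png;base64,iVBORw0KGgo...") // data URL: returns mimeType "image/png" and the base64 payload unchanged
mimeType, data, err = image.GetImageFromUrl("https://example.com/logo.jpg") // remote URL: checked with IsImageUrl, then downloaded and base64-encoded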
@@ -169,3 +169,34 @@ func TestGetImageSizeFromBase64(t *testing.T) {
})
}
}

func TestGetImageFromUrl(t *testing.T) {
for i, c := range cases {
t.Run("Decode:"+strconv.Itoa(i), func(t *testing.T) {
resp, err := http.Get(c.url)
assert.NoError(t, err)
defer resp.Body.Close()
data, err := io.ReadAll(resp.Body)
assert.NoError(t, err)
encoded := base64.StdEncoding.EncodeToString(data)

mimeType, base64Data, err := img.GetImageFromUrl(c.url)
assert.NoError(t, err)
assert.Equal(t, encoded, base64Data)
assert.Equal(t, "image/"+c.format, mimeType)

encodedBase64 := "data:image/" + c.format + ";base64," + encoded
mimeType, base64Data, err = img.GetImageFromUrl(encodedBase64)
assert.NoError(t, err)
assert.Equal(t, encoded, base64Data)
assert.Equal(t, "image/"+c.format, mimeType)
})
}

url := "https://raw.githubusercontent.com/songquanpeng/one-api/main/README.md"
_, _, err := img.GetImageFromUrl(url)
assert.Error(t, err)
encodedBase64 := "data:image/text;base64,"
_, _, err = img.GetImageFromUrl(encodedBase64)
assert.Error(t, err)
}
@@ -93,6 +93,7 @@ var ModelRatio = map[string]float64{
"qwen-plus": 1.4286, // ¥0.02 / 1k tokens
"qwen-max": 1.4286, // ¥0.02 / 1k tokens
"qwen-max-longcontext": 1.4286, // ¥0.02 / 1k tokens
"qwen-vl-plus": 0.5715, // ¥0.008 / 1k tokens
"text-embedding-v1": 0.05, // ¥0.0007 / 1k tokens
"SparkDesk": 1.2858, // ¥0.018 / 1k tokens
"360GPT_S2_V9": 0.8572, // ¥0.012 / 1k tokens
@@ -2,7 +2,11 @@ package controller

import (
"fmt"
"net/http"
"one-api/common"
"one-api/model"
"one-api/types"
"sort"

"github.com/gin-gonic/gin"
)
@@ -25,559 +29,38 @@ type OpenAIModelPermission struct {
}

type OpenAIModels struct {
Id string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
OwnedBy string `json:"owned_by"`
Permission []OpenAIModelPermission `json:"permission"`
Root string `json:"root"`
Parent *string `json:"parent"`
Id string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
OwnedBy *string `json:"owned_by"`
Permission *[]OpenAIModelPermission `json:"permission"`
Root *string `json:"root"`
Parent *string `json:"parent"`
}

var openAIModels []OpenAIModels
var openAIModelsMap map[string]OpenAIModels

func init() {
var permission []OpenAIModelPermission
permission = append(permission, OpenAIModelPermission{
Id: "modelperm-LwHkVFn8AcMItP432fKKDIKJ",
Object: "model_permission",
Created: 1626777600,
AllowCreateEngine: true,
AllowSampling: true,
AllowLogprobs: true,
AllowSearchIndices: false,
AllowView: true,
AllowFineTuning: false,
Organization: "*",
Group: nil,
IsBlocking: false,
})
// https://platform.openai.com/docs/models/model-endpoint-compatibility
openAIModels = []OpenAIModels{
{
Id: "dall-e-2",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "dall-e-2",
Parent: nil,
},
{
Id: "dall-e-3",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "dall-e-3",
Parent: nil,
},
{
Id: "whisper-1",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "whisper-1",
Parent: nil,
},
{
Id: "tts-1",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1",
Parent: nil,
},
{
Id: "tts-1-1106",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1-1106",
Parent: nil,
},
{
Id: "tts-1-hd",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1-hd",
Parent: nil,
},
{
Id: "tts-1-hd-1106",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1-hd-1106",
Parent: nil,
},
{
Id: "gpt-3.5-turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-0301",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-0301",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-0613",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-16k",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-16k",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-16k-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-16k-0613",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-1106",
Object: "model",
Created: 1699593571,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-1106",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-instruct",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-instruct",
Parent: nil,
},
{
Id: "gpt-4",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4",
Parent: nil,
},
{
Id: "gpt-4-0314",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-0314",
Parent: nil,
},
{
Id: "gpt-4-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-0613",
Parent: nil,
},
{
Id: "gpt-4-32k",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-32k",
Parent: nil,
},
{
Id: "gpt-4-32k-0314",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-32k-0314",
Parent: nil,
},
{
Id: "gpt-4-32k-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-32k-0613",
Parent: nil,
},
{
Id: "gpt-4-1106-preview",
Object: "model",
Created: 1699593571,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-1106-preview",
Parent: nil,
},
{
Id: "gpt-4-vision-preview",
Object: "model",
Created: 1699593571,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-vision-preview",
Parent: nil,
},
{
Id: "text-embedding-ada-002",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-embedding-ada-002",
Parent: nil,
},
{
Id: "text-davinci-003",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-davinci-003",
Parent: nil,
},
{
Id: "text-davinci-002",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-davinci-002",
Parent: nil,
},
{
Id: "text-curie-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-curie-001",
Parent: nil,
},
{
Id: "text-babbage-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-babbage-001",
Parent: nil,
},
{
Id: "text-ada-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-ada-001",
Parent: nil,
},
{
Id: "text-moderation-latest",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-moderation-latest",
Parent: nil,
},
{
Id: "text-moderation-stable",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-moderation-stable",
Parent: nil,
},
{
Id: "text-davinci-edit-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-davinci-edit-001",
Parent: nil,
},
{
Id: "code-davinci-edit-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "code-davinci-edit-001",
Parent: nil,
},
{
Id: "claude-instant-1",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-instant-1",
Parent: nil,
},
{
Id: "claude-2",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-2",
Parent: nil,
},
{
Id: "claude-2.1",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-2.1",
Parent: nil,
},
{
Id: "claude-2.0",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-2.0",
Parent: nil,
},
{
Id: "ERNIE-Bot",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "ERNIE-Bot",
Parent: nil,
},
{
Id: "ERNIE-Bot-turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "ERNIE-Bot-turbo",
Parent: nil,
},
{
Id: "ERNIE-Bot-4",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "ERNIE-Bot-4",
Parent: nil,
},
{
Id: "Embedding-V1",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "Embedding-V1",
Parent: nil,
},
{
Id: "PaLM-2",
Object: "model",
Created: 1677649963,
OwnedBy: "google",
Permission: permission,
Root: "PaLM-2",
Parent: nil,
},
{
Id: "gemini-pro",
Object: "model",
Created: 1677649963,
OwnedBy: "google",
Permission: permission,
Root: "gemini-pro",
Parent: nil,
},
{
Id: "gemini-pro-vision",
Object: "model",
Created: 1677649963,
OwnedBy: "google",
Permission: permission,
Root: "gemini-pro-vision",
Parent: nil,
},
{
Id: "chatglm_turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_turbo",
Parent: nil,
},
{
Id: "chatglm_pro",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_pro",
Parent: nil,
},
{
Id: "chatglm_std",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_std",
Parent: nil,
},
{
Id: "chatglm_lite",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_lite",
Parent: nil,
},
{
Id: "qwen-turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-turbo",
Parent: nil,
},
{
Id: "qwen-plus",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-plus",
Parent: nil,
},
{
Id: "qwen-max",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-max",
Parent: nil,
},
{
Id: "qwen-max-longcontext",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-max-longcontext",
Parent: nil,
},
{
Id: "text-embedding-v1",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "text-embedding-v1",
Parent: nil,
},
{
Id: "SparkDesk",
Object: "model",
Created: 1677649963,
OwnedBy: "xunfei",
Permission: permission,
Root: "SparkDesk",
Parent: nil,
},
{
Id: "360GPT_S2_V9",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "360GPT_S2_V9",
Parent: nil,
},
{
Id: "embedding-bert-512-v1",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "embedding-bert-512-v1",
Parent: nil,
},
{
Id: "embedding_s1_v1",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "embedding_s1_v1",
Parent: nil,
},
{
Id: "semantic_similarity_s1_v1",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "semantic_similarity_s1_v1",
Parent: nil,
},
{
Id: "hunyuan",
Object: "model",
Created: 1677649963,
OwnedBy: "tencent",
Permission: permission,
Root: "hunyuan",
Parent: nil,
},
keys := make([]string, 0, len(common.ModelRatio))
for k := range common.ModelRatio {
keys = append(keys, k)
}
sort.Strings(keys)

for _, modelId := range keys {
openAIModels = append(openAIModels, OpenAIModels{
Id: modelId,
Object: "model",
Created: 1677649963,
OwnedBy: nil,
Permission: nil,
Root: nil,
Parent: nil,
})
}

openAIModelsMap = make(map[string]OpenAIModels)
for _, model := range openAIModels {
openAIModelsMap[model.Id] = model
@@ -585,6 +68,35 @@ func init() {
}

func ListModels(c *gin.Context) {
groupName := c.GetString("group")

models, err := model.CacheGetGroupModels(groupName)
if err != nil {
common.AbortWithMessage(c, http.StatusServiceUnavailable, err.Error())
return
}
sort.Strings(models)

groupOpenAIModels := make([]OpenAIModels, 0, len(models))
for _, modelId := range models {
groupOpenAIModels = append(groupOpenAIModels, OpenAIModels{
Id: modelId,
Object: "model",
Created: 1677649963,
OwnedBy: nil,
Permission: nil,
Root: nil,
Parent: nil,
})
}

c.JSON(200, gin.H{
"object": "list",
"data": groupOpenAIModels,
})
}

func ListModelsForAdmin(c *gin.Context) {
c.JSON(200, gin.H{
"object": "list",
"data": openAIModels,
@@ -45,6 +45,8 @@ func fetchChannel(c *gin.Context, modelName string) (channel *model.Channel, pas
return
}

c.Set("channel_id", channel.Id)

return
}
@@ -39,6 +39,22 @@ func GetRandomSatisfiedChannel(group string, model string) (*Channel, error) {
return &channel, err
}

func GetGroupModels(group string) ([]string, error) {
var models []string
groupCol := "`group`"
trueVal := "1"
if common.UsingPostgreSQL {
groupCol = `"group"`
trueVal = "true"
}

err := DB.Model(&Ability{}).Where(groupCol+" = ? and enabled = ? ", group, trueVal).Distinct("model").Pluck("model", &models).Error
if err != nil {
return nil, err
}
return models, nil
}

func (channel *Channel) AddAbilities() error {
models_ := strings.Split(channel.Models, ",")
groups_ := strings.Split(channel.Group, ",")
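As a reading aid (an editor's assumption, not part of the diff), the GORM call above should reduce to roughly the following queries, assuming GORM's default pluralized table name for Ability; only the identifier quoting and the value bound for enabled differ:

// MySQL/SQLite: SELECT DISTINCT `model` FROM abilities WHERE `group` = ? AND enabled = '1'
// PostgreSQL:   SELECT DISTINCT "model" FROM abilities WHERE "group" = ? AND enabled = 'true'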
@@ -213,3 +213,22 @@ func CacheGetRandomSatisfiedChannel(group string, model string) (*Channel, error
idx := rand.Intn(endIdx)
return channels[idx], nil
}

func CacheGetGroupModels(group string) ([]string, error) {
if !common.MemoryCacheEnabled {
return GetGroupModels(group)
}
channelSyncLock.RLock()
defer channelSyncLock.RUnlock()

groupModels := group2model2channels[group]
if groupModels == nil {
return nil, errors.New("group not found")
}

models := make([]string, 0)
for model := range groupModels {
models = append(models, model)
}
return models, nil
}
@@ -2,6 +2,7 @@ package ali

import (
"fmt"
"strings"

"one-api/providers/base"
@@ -28,6 +29,16 @@ type AliProvider struct {
base.BaseProvider
}

func (p *AliProvider) GetFullRequestURL(requestURL string, modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")

if modelName == "qwen-vl-plus" {
requestURL = "/api/v1/services/aigc/multimodal-generation/generation"
}

return fmt.Sprintf("%s%s", baseURL, requestURL)
}

// Get the request headers
func (p *AliProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
@@ -26,21 +26,12 @@ func (aliResponse *AliChatResponse) ResponseHandler(resp *http.Response) (OpenAI
return
}

choice := types.ChatCompletionChoice{
Index: 0,
Message: types.ChatCompletionMessage{
Role: "assistant",
Content: aliResponse.Output.Text,
},
FinishReason: aliResponse.Output.FinishReason,
}

OpenAIResponse = types.ChatCompletionResponse{
ID: aliResponse.RequestId,
Object: "chat.completion",
Created: common.GetTimestamp(),
Model: aliResponse.Model,
Choices: []types.ChatCompletionChoice{choice},
Choices: aliResponse.Output.ToChatCompletionChoices(),
Usage: &types.Usage{
PromptTokens: aliResponse.Usage.InputTokens,
CompletionTokens: aliResponse.Usage.OutputTokens,
@@ -58,10 +49,31 @@ func (p *AliProvider) getChatRequestBody(request *types.ChatCompletionRequest) *
messages := make([]AliMessage, 0, len(request.Messages))
for i := 0; i < len(request.Messages); i++ {
message := request.Messages[i]
messages = append(messages, AliMessage{
Content: message.StringContent(),
Role: strings.ToLower(message.Role),
})
if request.Model != "qwen-vl-plus" {
messages = append(messages, AliMessage{
Content: message.StringContent(),
Role: strings.ToLower(message.Role),
})
} else {
openaiContent := message.ParseContent()
var parts []AliMessagePart
for _, part := range openaiContent {
if part.Type == types.ContentTypeText {
parts = append(parts, AliMessagePart{
Text: part.Text,
})
} else if part.Type == types.ContentTypeImageURL {
parts = append(parts, AliMessagePart{
Image: part.ImageURL.URL,
})
}
}
messages = append(messages, AliMessage{
Content: parts,
Role: strings.ToLower(message.Role),
})
}

}

enableSearch := false
@@ -77,6 +89,7 @@ func (p *AliProvider) getChatRequestBody(request *types.ChatCompletionRequest) *
Messages: messages,
},
Parameters: AliParameters{
ResultFormat: "message",
EnableSearch: enableSearch,
IncrementalOutput: request.Stream,
},
@@ -87,6 +100,7 @@ func (p *AliProvider) getChatRequestBody(request *types.ChatCompletionRequest) *
func (p *AliProvider) ChatAction(request *types.ChatCompletionRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {

requestBody := p.getChatRequestBody(request)

fullRequestURL := p.GetFullRequestURL(p.ChatCompletions, request.Model)
headers := p.GetRequestHeaders()
if request.Stream {
@@ -134,10 +148,15 @@ func (p *AliProvider) ChatAction(request *types.ChatCompletionRequest, isModelMa

// Convert an Ali (Alibaba Cloud) response to an OpenAI response
func (p *AliProvider) streamResponseAli2OpenAI(aliResponse *AliChatResponse) *types.ChatCompletionStreamResponse {
// chatChoice := aliResponse.Output.ToChatCompletionChoices()
// jsonBody, _ := json.MarshalIndent(chatChoice, "", " ")
// fmt.Println("requestBody:", string(jsonBody))
var choice types.ChatCompletionStreamChoice
choice.Delta.Content = aliResponse.Output.Text
if aliResponse.Output.FinishReason != "null" {
finishReason := aliResponse.Output.FinishReason
choice.Index = aliResponse.Output.Choices[0].Index
choice.Delta.Content = aliResponse.Output.Choices[0].Message.StringContent()
// fmt.Println("choice.Delta.Content:", chatChoice[0].Message)
if aliResponse.Output.Choices[0].FinishReason != "null" {
finishReason := aliResponse.Output.Choices[0].FinishReason
choice.FinishReason = &finishReason
}
@@ -200,7 +219,8 @@ func (p *AliProvider) sendStreamRequest(req *http.Request, model string) (usage
stopChan <- true
}()
common.SetEventStreamHeaders(p.Context)
// lastResponseText := ""
lastResponseText := ""
index := 0
p.Context.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
@@ -216,9 +236,11 @@ func (p *AliProvider) sendStreamRequest(req *http.Request, model string) (usage
usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
}
aliResponse.Model = model
aliResponse.Output.Choices[0].Index = index
index++
response := p.streamResponseAli2OpenAI(&aliResponse)
// response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
// lastResponseText = aliResponse.Output.Text
response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
lastResponseText = aliResponse.Output.Choices[0].Message.StringContent()
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
@@ -1,5 +1,9 @@
package ali

import (
"one-api/types"
)

type AliError struct {
Code string `json:"code"`
Message string `json:"message"`
@@ -13,10 +17,15 @@ type AliUsage struct {
}

type AliMessage struct {
Content string `json:"content"`
Content any `json:"content"`
Role string `json:"role"`
}

type AliMessagePart struct {
Text string `json:"text,omitempty"`
Image string `json:"image,omitempty"`
}
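To illustrate the switch of Content to any (an editor's sketch, not from the diff): for qwen-vl-plus the content field carries a slice of AliMessagePart, so with the struct tags above a user message serializes roughly as

// {"content": [{"text": "describe this image"}, {"image": "https://example.com/cat.png"}], "role": "user"}  // hypothetical example values
// while non-vision models keep the plain string form: {"content": "hello", "role": "user"}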

type AliInput struct {
// Prompt string `json:"prompt"`
Messages []AliMessage `json:"messages"`
@@ -28,6 +37,7 @@ type AliParameters struct {
Seed uint64 `json:"seed,omitempty"`
EnableSearch bool `json:"enable_search,omitempty"`
IncrementalOutput bool `json:"incremental_output,omitempty"`
ResultFormat string `json:"result_format,omitempty"`
}

type AliChatRequest struct {
@@ -36,9 +46,25 @@ type AliChatRequest struct {
Parameters AliParameters `json:"parameters,omitempty"`
}

type AliChoice struct {
FinishReason string `json:"finish_reason"`
Message types.ChatCompletionMessage `json:"message"`
}

type AliOutput struct {
Text string `json:"text"`
FinishReason string `json:"finish_reason"`
Choices []types.ChatCompletionChoice `json:"choices"`
}

func (o *AliOutput) ToChatCompletionChoices() []types.ChatCompletionChoice {
for i := range o.Choices {
_, ok := o.Choices[i].Message.Content.(string)
if ok {
continue
}

o.Choices[i].Message.Content = o.Choices[i].Message.ParseContent()
}
return o.Choices
}

type AliChatResponse struct {
@@ -60,27 +60,27 @@ func (response *GeminiChatResponse) ResponseHandler(resp *http.Response) (OpenAI
}

// Setting safety to the lowest possible values since Gemini is already powerless enough
func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest) (requestBody *GeminiChatRequest) {
func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest) (requestBody *GeminiChatRequest, errWithCode *types.OpenAIErrorWithStatusCode) {
geminiRequest := GeminiChatRequest{
Contents: make([]GeminiChatContent, 0, len(request.Messages)),
//SafetySettings: []GeminiChatSafetySettings{
// {
// Category: "HARM_CATEGORY_HARASSMENT",
// Threshold: "BLOCK_ONLY_HIGH",
// },
// {
// Category: "HARM_CATEGORY_HATE_SPEECH",
// Threshold: "BLOCK_ONLY_HIGH",
// },
// {
// Category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
// Threshold: "BLOCK_ONLY_HIGH",
// },
// {
// Category: "HARM_CATEGORY_DANGEROUS_CONTENT",
// Threshold: "BLOCK_ONLY_HIGH",
// },
//},
SafetySettings: []GeminiChatSafetySettings{
{
Category: "HARM_CATEGORY_HARASSMENT",
Threshold: "BLOCK_NONE",
},
{
Category: "HARM_CATEGORY_HATE_SPEECH",
Threshold: "BLOCK_NONE",
},
{
Category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
Threshold: "BLOCK_NONE",
},
{
Category: "HARM_CATEGORY_DANGEROUS_CONTENT",
Threshold: "BLOCK_NONE",
},
},
GenerationConfig: GeminiChatGenerationConfig{
Temperature: request.Temperature,
TopP: request.TopP,
@@ -118,7 +118,10 @@ func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest
if imageNum > GeminiVisionMaxImageNum {
continue
}
mimeType, data, _ := image.GetImageFromUrl(part.ImageURL.URL)
mimeType, data, err := image.GetImageFromUrl(part.ImageURL.URL)
if err != nil {
return nil, common.ErrorWrapper(err, "image_url_invalid", http.StatusBadRequest)
}
parts = append(parts, GeminiPart{
InlineData: &GeminiInlineData{
MimeType: mimeType,
@@ -154,11 +157,14 @@ func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest
}
}

return &geminiRequest
return &geminiRequest, nil
}

func (p *GeminiProvider) ChatAction(request *types.ChatCompletionRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
requestBody := p.getChatRequestBody(request)
requestBody, errWithCode := p.getChatRequestBody(request)
if errWithCode != nil {
return
}
fullRequestURL := p.GetFullRequestURL("generateContent", request.Model)
headers := p.GetRequestHeaders()
if request.Stream {
@@ -58,7 +58,7 @@ func (p *XunfeiProvider) getXunfeiAuthUrl(apiKey string, apiSecret string) (stri
query := p.Context.Request.URL.Query()
apiVersion := query.Get("api-version")
if apiVersion == "" {
apiVersion = p.Channel.Key
apiVersion = p.Channel.Other
}
if apiVersion == "" {
apiVersion = "v1.1"
@@ -67,7 +67,7 @@ func SetApiRouter(router *gin.Engine) {
{
channelRoute.GET("/", controller.GetAllChannels)
channelRoute.GET("/search", controller.SearchChannels)
channelRoute.GET("/models", controller.ListModels)
channelRoute.GET("/models", controller.ListModelsForAdmin)
channelRoute.GET("/:id", controller.GetChannel)
channelRoute.GET("/test", controller.TestAllChannels)
channelRoute.GET("/test/:id", controller.TestChannel)
@@ -11,7 +11,7 @@ func SetRelayRouter(router *gin.Engine) {
router.Use(middleware.CORS())
// https://platform.openai.com/docs/api-reference/introduction
modelsRouter := router.Group("/v1/models")
modelsRouter.Use(middleware.TokenAuth())
modelsRouter.Use(middleware.TokenAuth(), middleware.Distribute())
{
modelsRouter.GET("", controller.ListModels)
modelsRouter.GET("/:model", controller.RetrieveModel)
@@ -27,11 +27,11 @@ func (m ChatCompletionMessage) StringContent() string {
if !ok {
continue
}
if contentMap["type"] == "text" {
if subStr, ok := contentMap["text"].(string); ok {
contentStr += subStr
}

if subStr, ok := contentMap["text"].(string); ok && subStr != "" {
contentStr += subStr
}

}
return contentStr
}
@@ -55,23 +55,26 @@ func (m ChatCompletionMessage) ParseContent() []ChatMessagePart {
if !ok {
continue
}
switch contentMap["type"] {
case ContentTypeText:
if subStr, ok := contentMap["text"].(string); ok {
contentList = append(contentList, ChatMessagePart{
Type: ContentTypeText,
Text: subStr,
})
}
case ContentTypeImageURL:
if subObj, ok := contentMap["image_url"].(map[string]any); ok {
contentList = append(contentList, ChatMessagePart{
Type: ContentTypeImageURL,
ImageURL: &ChatMessageImageURL{
URL: subObj["url"].(string),
},
})
}

if subStr, ok := contentMap["text"].(string); ok && subStr != "" {
contentList = append(contentList, ChatMessagePart{
Type: ContentTypeText,
Text: subStr,
})
} else if subObj, ok := contentMap["image_url"].(map[string]any); ok {
contentList = append(contentList, ChatMessagePart{
Type: ContentTypeImageURL,
ImageURL: &ChatMessageImageURL{
URL: subObj["url"].(string),
},
})
} else if subObj, ok := contentMap["image"].(string); ok {
contentList = append(contentList, ChatMessagePart{
Type: ContentTypeImageURL,
ImageURL: &ChatMessageImageURL{
URL: subObj,
},
})
}
}
return contentList