Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-12 03:13:41 +08:00)
fix: models API returning models from deactivated channels
- Enhance logging functionality by adding context support and improving debugging options.
- Standardize function naming conventions across middleware to ensure consistency.
- Optimize data retrieval and handling in the model controller, including caching and error management.
- Simplify the bug report template to streamline the issue reporting process.
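For reviewers, a minimal sketch of the exact model+channel matching that the new ListModels relies on is shown below. The ability and openAIModel types are simplified stand-ins for the repository's model.Ability and OpenAIModels, and filterByAbility is a hypothetical helper, not code from the repository.

package main

import (
	"fmt"
	"sort"
)

// ability is a simplified stand-in for one enabled model+channel row.
type ability struct {
	Model       string
	ChannelName string // in the real code this is derived from the channel's adaptor
}

// openAIModel is a simplified stand-in for the controller's OpenAIModels struct.
type openAIModel struct {
	Id      string
	OwnedBy string // the channel adaptor's name
}

// filterByAbility keeps only models whose exact "model:channel" combination
// is enabled for the user's group, then sorts them for a stable response.
func filterByAbility(all []openAIModel, abilities []ability) []openAIModel {
	abilityMap := make(map[string]bool)
	for _, a := range abilities {
		abilityMap[a.Model+":"+a.ChannelName] = true
	}
	out := make([]openAIModel, 0)
	for _, m := range all {
		if abilityMap[m.Id+":"+m.OwnedBy] {
			out = append(out, m)
		}
	}
	sort.Slice(out, func(i, j int) bool { return out[i].Id < out[j].Id })
	return out
}

func main() {
	all := []openAIModel{
		{Id: "gpt-4o", OwnedBy: "openai"},
		{Id: "gpt-4o", OwnedBy: "azure"}, // same model name offered by a channel that is not enabled
		{Id: "claude-3-5-sonnet", OwnedBy: "anthropic"},
	}
	enabled := []ability{{Model: "gpt-4o", ChannelName: "openai"}}
	fmt.Println(filterByAbility(all, enabled)) // only the openai-backed gpt-4o is returned
}

Keying the lookup on "model:channel" rather than on the model name alone is what keeps models that are only served by deactivated channels out of the /v1/models response.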
@@ -2,8 +2,12 @@ package controller
 
 import (
 	"fmt"
+	"net/http"
+	"sort"
+
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/ctxkey"
+	"github.com/songquanpeng/one-api/middleware"
 	"github.com/songquanpeng/one-api/model"
 	relay "github.com/songquanpeng/one-api/relay"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -11,8 +15,6 @@ import (
 	"github.com/songquanpeng/one-api/relay/channeltype"
 	"github.com/songquanpeng/one-api/relay/meta"
 	relaymodel "github.com/songquanpeng/one-api/relay/model"
-	"net/http"
-	"strings"
 )
 
 // https://platform.openai.com/docs/api-reference/models/list
@@ -33,16 +35,20 @@ type OpenAIModelPermission struct {
 }
 
 type OpenAIModels struct {
-	Id      string `json:"id"`
-	Object  string `json:"object"`
-	Created int    `json:"created"`
+	// Id model's name
+	//
+	// BUG: Different channels may have the same model name
+	Id      string `json:"id"`
+	Object  string `json:"object"`
+	Created int    `json:"created"`
+	// OwnedBy is the channel's adaptor name
 	OwnedBy    string                  `json:"owned_by"`
 	Permission []OpenAIModelPermission `json:"permission"`
 	Root       string                  `json:"root"`
 	Parent     *string                 `json:"parent"`
 }
 
-var models []OpenAIModels
+var allModels []OpenAIModels
 var modelsMap map[string]OpenAIModels
 var channelId2Models map[int][]string
 
@@ -71,7 +77,7 @@ func init() {
 		channelName := adaptor.GetChannelName()
 		modelNames := adaptor.GetModelList()
 		for _, modelName := range modelNames {
-			models = append(models, OpenAIModels{
+			allModels = append(allModels, OpenAIModels{
 				Id:      modelName,
 				Object:  "model",
 				Created: 1626777600,
@@ -88,7 +94,7 @@ func init() {
 		}
 		channelName, channelModelList := openai.GetCompatibleChannelMeta(channelType)
 		for _, modelName := range channelModelList {
-			models = append(models, OpenAIModels{
+			allModels = append(allModels, OpenAIModels{
 				Id:      modelName,
 				Object:  "model",
 				Created: 1626777600,
@@ -100,7 +106,7 @@ func init() {
 		}
 	}
 	modelsMap = make(map[string]OpenAIModels)
-	for _, model := range models {
+	for _, model := range allModels {
 		modelsMap[model.Id] = model
 	}
 	channelId2Models = make(map[int][]string)
@@ -125,49 +131,56 @@ func DashboardListModels(c *gin.Context) {
 func ListAllModels(c *gin.Context) {
 	c.JSON(200, gin.H{
 		"object": "list",
-		"data":   models,
+		"data":   allModels,
 	})
 }
 
 func ListModels(c *gin.Context) {
-	ctx := c.Request.Context()
-	var availableModels []string
-	if c.GetString(ctxkey.AvailableModels) != "" {
-		availableModels = strings.Split(c.GetString(ctxkey.AvailableModels), ",")
-	} else {
-		userId := c.GetInt(ctxkey.Id)
-		userGroup, _ := model.CacheGetUserGroup(userId)
-		availableModels, _ = model.CacheGetGroupModels(ctx, userGroup)
+	userId := c.GetInt(ctxkey.Id)
+	userGroup, err := model.CacheGetUserGroup(userId)
+	if err != nil {
+		middleware.AbortWithError(c, http.StatusBadRequest, err)
+		return
 	}
-	modelSet := make(map[string]bool)
-	for _, availableModel := range availableModels {
-		modelSet[availableModel] = true
+
+	// Get available models with their channel names
+	availableAbilities, err := model.CacheGetGroupModelsV2(c.Request.Context(), userGroup)
+	if err != nil {
+		middleware.AbortWithError(c, http.StatusBadRequest, err)
+		return
 	}
+
+	// Create a map for quick lookup of enabled model+channel combinations
+	// Only store the exact model:channel combinations from abilities
+	abilityMap := make(map[string]bool)
+	for _, ability := range availableAbilities {
+		// Store as "modelName:channelName" for exact matching
+		adaptor := relay.GetAdaptor(channeltype.ToAPIType(ability.ChannelType))
+		key := ability.Model + ":" + adaptor.GetChannelName()
+		abilityMap[key] = true
+	}
+
+	// Filter models that match user's abilities with EXACT model+channel matches
 	availableOpenAIModels := make([]OpenAIModels, 0)
-	for _, model := range models {
-		if _, ok := modelSet[model.Id]; ok {
-			modelSet[model.Id] = false
+	// Only include models that have a matching model+channel combination
+	for _, model := range allModels {
+		key := model.Id + ":" + model.OwnedBy
+		if abilityMap[key] {
 			availableOpenAIModels = append(availableOpenAIModels, model)
 		}
 	}
-	for modelName, ok := range modelSet {
-		if ok {
-			availableOpenAIModels = append(availableOpenAIModels, OpenAIModels{
-				Id:      modelName,
-				Object:  "model",
-				Created: 1626777600,
-				OwnedBy: "custom",
-				Root:    modelName,
-				Parent:  nil,
-			})
-		}
-	}
+
+	// Sort models alphabetically for consistent presentation
+	sort.Slice(availableOpenAIModels, func(i, j int) bool {
+		return availableOpenAIModels[i].Id < availableOpenAIModels[j].Id
+	})
+
 	c.JSON(200, gin.H{
 		"object": "list",
 		"data":   availableOpenAIModels,
 	})
 }
 
 func RetrieveModel(c *gin.Context) {
 	modelId := c.Param("model")
 	if model, ok := modelsMap[modelId]; ok {
@@ -196,7 +209,8 @@ func GetUserAvailableModels(c *gin.Context) {
 		})
 		return
 	}
-	models, err := model.CacheGetGroupModels(ctx, userGroup)
+
+	models, err := model.CacheGetGroupModelsV2(ctx, userGroup)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{
 			"success": false,
@@ -204,10 +218,20 @@ func GetUserAvailableModels(c *gin.Context) {
 		})
 		return
 	}
+
+	var modelNames []string
+	modelsMap := map[string]bool{}
+	for _, model := range models {
+		modelsMap[model.Model] = true
+	}
+	for modelName := range modelsMap {
+		modelNames = append(modelNames, modelName)
+	}
+
 	c.JSON(http.StatusOK, gin.H{
 		"success": true,
 		"message": "",
-		"data":    models,
+		"data":    modelNames,
 	})
 	return
 }
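One more note on the last hunk: CacheGetGroupModelsV2 returns one ability row per model+channel pair, so GetUserAvailableModels now collapses duplicates into a unique list of model names before responding. A minimal sketch of that dedup step follows; uniqueModelNames is a hypothetical helper, and the handler itself builds a map and then ranges over it, so its output order is unspecified.

package main

import "fmt"

// uniqueModelNames collapses duplicate model names (one row may exist per
// model+channel pair) into a unique list, keeping first-seen order here.
func uniqueModelNames(models []string) []string {
	seen := map[string]bool{}
	var names []string
	for _, m := range models {
		if !seen[m] {
			seen[m] = true
			names = append(names, m)
		}
	}
	return names
}

func main() {
	// "gpt-4o" is enabled on two channels but should be listed only once.
	fmt.Println(uniqueModelNames([]string{"gpt-4o", "gpt-4o", "claude-3-5-sonnet"}))
}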