mirror of
				https://github.com/songquanpeng/one-api.git
				synced 2025-11-04 07:43:41 +08:00 
			
		
		
		
	Compare commits
	
		
			9 Commits
		
	
	
		
			v0.6.5-alp
			...
			v0.6.5-alp
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 
						 | 
					a44fb5d482 | ||
| 
						 | 
					eec41849ec | ||
| 
						 | 
					d4347e7a35 | ||
| 
						 | 
					b50b43eb65 | ||
| 
						 | 
					348adc2b02 | ||
| 
						 | 
					dcf24b98dc | ||
| 
						 | 
					af679e04f4 | ||
| 
						 | 
					93cbca6a9f | ||
| 
						 | 
					840ef80d94 | 
@@ -73,15 +73,15 @@ var ModelRatio = map[string]float64{
 | 
			
		||||
	"claude-3-opus-20240229":   15.0 / 1000 * USD,
 | 
			
		||||
	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7
 | 
			
		||||
	"ERNIE-4.0-8K":       0.120 * RMB,
 | 
			
		||||
	"ERNIE-Bot-8K-0922":  0.024 * RMB,
 | 
			
		||||
	"ERNIE-3.5-8K":       0.012 * RMB,
 | 
			
		||||
	"ERNIE-Lite-8K-0922": 0.008 * RMB,
 | 
			
		||||
	"ERNIE-Speed-8K":     0.004 * RMB,
 | 
			
		||||
	"ERNIE-3.5-4K-0205":  0.012 * RMB,
 | 
			
		||||
	"ERNIE-3.5-8K-0205":  0.024 * RMB,
 | 
			
		||||
	"ERNIE-3.5-8K-1222":  0.012 * RMB,
 | 
			
		||||
	"ERNIE-Lite-8K":      0.003 * RMB,
 | 
			
		||||
	"ERNIE-Bot-8K":       0.024 * RMB,
 | 
			
		||||
	"ERNIE-3.5-4K-0205":  0.012 * RMB,
 | 
			
		||||
	"ERNIE-Speed-8K":     0.004 * RMB,
 | 
			
		||||
	"ERNIE-Speed-128K":   0.004 * RMB,
 | 
			
		||||
	"ERNIE-Lite-8K-0922": 0.008 * RMB,
 | 
			
		||||
	"ERNIE-Lite-8K-0308": 0.003 * RMB,
 | 
			
		||||
	"ERNIE-Tiny-8K":      0.001 * RMB,
 | 
			
		||||
	"BLOOMZ-7B":          0.004 * RMB,
 | 
			
		||||
	"Embedding-V1":       0.002 * RMB,
 | 
			
		||||
@@ -104,11 +104,15 @@ var ModelRatio = map[string]float64{
 | 
			
		||||
	"chatglm_pro":               0.7143, // ¥0.01 / 1k tokens
 | 
			
		||||
	"chatglm_std":               0.3572, // ¥0.005 / 1k tokens
 | 
			
		||||
	"chatglm_lite":              0.1429, // ¥0.002 / 1k tokens
 | 
			
		||||
	"qwen-turbo":                0.5715, // ¥0.008 / 1k tokens  // https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-thousand-questions-metering-and-billing
 | 
			
		||||
	// https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-thousand-questions-metering-and-billing
 | 
			
		||||
	"qwen-turbo":                0.5715, // ¥0.008 / 1k tokens
 | 
			
		||||
	"qwen-plus":                 1.4286, // ¥0.02 / 1k tokens
 | 
			
		||||
	"qwen-max":                  1.4286, // ¥0.02 / 1k tokens
 | 
			
		||||
	"qwen-max-longcontext":      1.4286, // ¥0.02 / 1k tokens
 | 
			
		||||
	"text-embedding-v1":         0.05,   // ¥0.0007 / 1k tokens
 | 
			
		||||
	"ali-stable-diffusion-xl":   8,
 | 
			
		||||
	"ali-stable-diffusion-v1.5": 8,
 | 
			
		||||
	"wanx-v1":                   8,
 | 
			
		||||
	"SparkDesk":                 1.2858, // ¥0.018 / 1k tokens
 | 
			
		||||
	"SparkDesk-v1.1":            1.2858, // ¥0.018 / 1k tokens
 | 
			
		||||
	"SparkDesk-v2.1":            1.2858, // ¥0.018 / 1k tokens
 | 
			
		||||
 
 | 
			
		||||
@@ -5,9 +5,18 @@ import (
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"net"
 | 
			
		||||
	"strings"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func IsValidSubnet(subnet string) error {
 | 
			
		||||
// splitSubnets breaks a comma-separated subnet list into its parts,
// trimming surrounding whitespace from each entry.
func splitSubnets(subnets string) []string {
	parts := strings.Split(subnets, ",")
	for i, part := range parts {
		parts[i] = strings.TrimSpace(part)
	}
	return parts
}
 | 
			
		||||
 | 
			
		||||
func isValidSubnet(subnet string) error {
 | 
			
		||||
	_, _, err := net.ParseCIDR(subnet)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return fmt.Errorf("failed to parse subnet: %w", err)
 | 
			
		||||
@@ -15,7 +24,7 @@ func IsValidSubnet(subnet string) error {
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func IsIpInSubnet(ctx context.Context, ip string, subnet string) bool {
 | 
			
		||||
func isIpInSubnet(ctx context.Context, ip string, subnet string) bool {
 | 
			
		||||
	_, ipNet, err := net.ParseCIDR(subnet)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		logger.Errorf(ctx, "failed to parse subnet: %s", err.Error())
 | 
			
		||||
@@ -23,3 +32,21 @@ func IsIpInSubnet(ctx context.Context, ip string, subnet string) bool {
 | 
			
		||||
	}
 | 
			
		||||
	return ipNet.Contains(net.ParseIP(ip))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func IsValidSubnets(subnets string) error {
 | 
			
		||||
	for _, subnet := range splitSubnets(subnets) {
 | 
			
		||||
		if err := isValidSubnet(subnet); err != nil {
 | 
			
		||||
			return err
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func IsIpInSubnets(ctx context.Context, ip string, subnets string) bool {
 | 
			
		||||
	for _, subnet := range splitSubnets(subnets) {
 | 
			
		||||
		if isIpInSubnet(ctx, ip, subnet) {
 | 
			
		||||
			return true
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	return false
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -13,7 +13,7 @@ func TestIsIpInSubnet(t *testing.T) {
 | 
			
		||||
	ip2 := "125.216.250.89"
 | 
			
		||||
	subnet := "192.168.0.0/24"
 | 
			
		||||
	Convey("TestIsIpInSubnet", t, func() {
 | 
			
		||||
		So(IsIpInSubnet(ctx, ip1, subnet), ShouldBeTrue)
 | 
			
		||||
		So(IsIpInSubnet(ctx, ip2, subnet), ShouldBeFalse)
 | 
			
		||||
		So(isIpInSubnet(ctx, ip1, subnet), ShouldBeTrue)
 | 
			
		||||
		So(isIpInSubnet(ctx, ip2, subnet), ShouldBeFalse)
 | 
			
		||||
	})
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -41,8 +41,8 @@ type OpenAIModels struct {
 | 
			
		||||
	Parent     *string                 `json:"parent"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
var openAIModels []OpenAIModels
 | 
			
		||||
var openAIModelsMap map[string]OpenAIModels
 | 
			
		||||
var models []OpenAIModels
 | 
			
		||||
var modelsMap map[string]OpenAIModels
 | 
			
		||||
var channelId2Models map[int][]string
 | 
			
		||||
 | 
			
		||||
func init() {
 | 
			
		||||
@@ -70,7 +70,7 @@ func init() {
 | 
			
		||||
		channelName := adaptor.GetChannelName()
 | 
			
		||||
		modelNames := adaptor.GetModelList()
 | 
			
		||||
		for _, modelName := range modelNames {
 | 
			
		||||
			openAIModels = append(openAIModels, OpenAIModels{
 | 
			
		||||
			models = append(models, OpenAIModels{
 | 
			
		||||
				Id:         modelName,
 | 
			
		||||
				Object:     "model",
 | 
			
		||||
				Created:    1626777600,
 | 
			
		||||
@@ -87,7 +87,7 @@ func init() {
 | 
			
		||||
		}
 | 
			
		||||
		channelName, channelModelList := openai.GetCompatibleChannelMeta(channelType)
 | 
			
		||||
		for _, modelName := range channelModelList {
 | 
			
		||||
			openAIModels = append(openAIModels, OpenAIModels{
 | 
			
		||||
			models = append(models, OpenAIModels{
 | 
			
		||||
				Id:         modelName,
 | 
			
		||||
				Object:     "model",
 | 
			
		||||
				Created:    1626777600,
 | 
			
		||||
@@ -98,9 +98,9 @@ func init() {
 | 
			
		||||
			})
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	openAIModelsMap = make(map[string]OpenAIModels)
 | 
			
		||||
	for _, model := range openAIModels {
 | 
			
		||||
		openAIModelsMap[model.Id] = model
 | 
			
		||||
	modelsMap = make(map[string]OpenAIModels)
 | 
			
		||||
	for _, model := range models {
 | 
			
		||||
		modelsMap[model.Id] = model
 | 
			
		||||
	}
 | 
			
		||||
	channelId2Models = make(map[int][]string)
 | 
			
		||||
	for i := 1; i < common.ChannelTypeDummy; i++ {
 | 
			
		||||
@@ -121,6 +121,13 @@ func DashboardListModels(c *gin.Context) {
 | 
			
		||||
	})
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ListAllModels responds with the full package-level models slice as an
// OpenAI-style model list, without filtering by the caller's channel/token.
func ListAllModels(c *gin.Context) {
	c.JSON(200, gin.H{
		"object": "list",
		"data":   models,
	})
}
 | 
			
		||||
 | 
			
		||||
func ListModels(c *gin.Context) {
 | 
			
		||||
	ctx := c.Request.Context()
 | 
			
		||||
	var availableModels []string
 | 
			
		||||
@@ -136,7 +143,7 @@ func ListModels(c *gin.Context) {
 | 
			
		||||
		modelSet[availableModel] = true
 | 
			
		||||
	}
 | 
			
		||||
	availableOpenAIModels := make([]OpenAIModels, 0)
 | 
			
		||||
	for _, model := range openAIModels {
 | 
			
		||||
	for _, model := range models {
 | 
			
		||||
		if _, ok := modelSet[model.Id]; ok {
 | 
			
		||||
			modelSet[model.Id] = false
 | 
			
		||||
			availableOpenAIModels = append(availableOpenAIModels, model)
 | 
			
		||||
@@ -162,7 +169,7 @@ func ListModels(c *gin.Context) {
 | 
			
		||||
 | 
			
		||||
func RetrieveModel(c *gin.Context) {
 | 
			
		||||
	modelId := c.Param("model")
 | 
			
		||||
	if model, ok := openAIModelsMap[modelId]; ok {
 | 
			
		||||
	if model, ok := modelsMap[modelId]; ok {
 | 
			
		||||
		c.JSON(200, model)
 | 
			
		||||
	} else {
 | 
			
		||||
		Error := relaymodel.Error{
 | 
			
		||||
 
 | 
			
		||||
@@ -111,7 +111,7 @@ func validateToken(c *gin.Context, token model.Token) error {
 | 
			
		||||
		return fmt.Errorf("令牌名称过长")
 | 
			
		||||
	}
 | 
			
		||||
	if token.Subnet != nil && *token.Subnet != "" {
 | 
			
		||||
		err := network.IsValidSubnet(*token.Subnet)
 | 
			
		||||
		err := network.IsValidSubnets(*token.Subnet)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return fmt.Errorf("无效的网段:%s", err.Error())
 | 
			
		||||
		}
 | 
			
		||||
 
 | 
			
		||||
@@ -102,7 +102,7 @@ func TokenAuth() func(c *gin.Context) {
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
		if token.Subnet != nil && *token.Subnet != "" {
 | 
			
		||||
			if !network.IsIpInSubnet(ctx, c.ClientIP(), *token.Subnet) {
 | 
			
		||||
			if !network.IsIpInSubnets(ctx, c.ClientIP(), *token.Subnet) {
 | 
			
		||||
				abortWithMessage(c, http.StatusForbidden, fmt.Sprintf("该令牌只能在指定网段使用:%s,当前 ip:%s", *token.Subnet, c.ClientIP()))
 | 
			
		||||
				return
 | 
			
		||||
			}
 | 
			
		||||
@@ -117,7 +117,7 @@ func TokenAuth() func(c *gin.Context) {
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
		requestModel, err := getRequestModel(c)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
		if err != nil && !strings.HasPrefix(c.Request.URL.Path, "/v1/models") {
 | 
			
		||||
			abortWithMessage(c, http.StatusBadRequest, err.Error())
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
 
 | 
			
		||||
@@ -38,6 +38,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return aiProxyLibraryRequest, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest checks the incoming image request for nil; this
// channel performs no transformation and forwards the request unchanged.
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -23,10 +23,16 @@ func (a *Adaptor) Init(meta *util.RelayMeta) {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
 | 
			
		||||
	fullRequestURL := fmt.Sprintf("%s/api/v1/services/aigc/text-generation/generation", meta.BaseURL)
 | 
			
		||||
	if meta.Mode == constant.RelayModeEmbeddings {
 | 
			
		||||
	fullRequestURL := ""
 | 
			
		||||
	switch meta.Mode {
 | 
			
		||||
	case constant.RelayModeEmbeddings:
 | 
			
		||||
		fullRequestURL = fmt.Sprintf("%s/api/v1/services/embeddings/text-embedding/text-embedding", meta.BaseURL)
 | 
			
		||||
	case constant.RelayModeImagesGenerations:
 | 
			
		||||
		fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text2image/image-synthesis", meta.BaseURL)
 | 
			
		||||
	default:
 | 
			
		||||
		fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text-generation/generation", meta.BaseURL)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return fullRequestURL, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@@ -34,10 +40,12 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *ut
 | 
			
		||||
	channel.SetupCommonRequestHeader(c, req, meta)
 | 
			
		||||
	if meta.IsStream {
 | 
			
		||||
		req.Header.Set("Accept", "text/event-stream")
 | 
			
		||||
		req.Header.Set("X-DashScope-SSE", "enable")
 | 
			
		||||
	}
 | 
			
		||||
	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
 | 
			
		||||
	if meta.IsStream {
 | 
			
		||||
		req.Header.Set("X-DashScope-SSE", "enable")
 | 
			
		||||
 | 
			
		||||
	if meta.Mode == constant.RelayModeImagesGenerations {
 | 
			
		||||
		req.Header.Set("X-DashScope-Async", "enable")
 | 
			
		||||
	}
 | 
			
		||||
	if c.GetString(common.ConfigKeyPlugin) != "" {
 | 
			
		||||
		req.Header.Set("X-DashScope-Plugin", c.GetString(common.ConfigKeyPlugin))
 | 
			
		||||
@@ -51,14 +59,23 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	}
 | 
			
		||||
	switch relayMode {
 | 
			
		||||
	case constant.RelayModeEmbeddings:
 | 
			
		||||
		baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
 | 
			
		||||
		return baiduEmbeddingRequest, nil
 | 
			
		||||
		aliEmbeddingRequest := ConvertEmbeddingRequest(*request)
 | 
			
		||||
		return aliEmbeddingRequest, nil
 | 
			
		||||
	default:
 | 
			
		||||
		baiduRequest := ConvertRequest(*request)
 | 
			
		||||
		return baiduRequest, nil
 | 
			
		||||
		aliRequest := ConvertRequest(*request)
 | 
			
		||||
		return aliRequest, nil
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	aliRequest := ConvertImageRequest(*request)
 | 
			
		||||
	return aliRequest, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
@@ -70,6 +87,8 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.Rel
 | 
			
		||||
		switch meta.Mode {
 | 
			
		||||
		case constant.RelayModeEmbeddings:
 | 
			
		||||
			err, usage = EmbeddingHandler(c, resp)
 | 
			
		||||
		case constant.RelayModeImagesGenerations:
 | 
			
		||||
			err, usage = ImageHandler(c, resp)
 | 
			
		||||
		default:
 | 
			
		||||
			err, usage = Handler(c, resp)
 | 
			
		||||
		}
 | 
			
		||||
 
 | 
			
		||||
@@ -3,4 +3,5 @@ package ali
 | 
			
		||||
// ModelList enumerates the model names the Ali (DashScope) channel supports:
// qwen chat models, the text embedding model, and image-generation models.
var ModelList = []string{
	"qwen-turbo", "qwen-plus", "qwen-max", "qwen-max-longcontext",
	"text-embedding-v1",
	"ali-stable-diffusion-xl", "ali-stable-diffusion-v1.5", "wanx-v1",
}
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										192
									
								
								relay/channel/ali/image.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										192
									
								
								relay/channel/ali/image.go
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,192 @@
 | 
			
		||||
package ali
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"encoding/base64"
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/helper"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/channel/openai"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
	"time"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
// ImageHandler handles an Ali (DashScope) asynchronous image-generation
// response. The upstream submission returns a task id; this handler polls
// the task until it reaches a terminal state, converts the result into an
// OpenAI-style image response, and writes it to the client. On success it
// returns (nil, nil) — no usage information is reported.
func ImageHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	// Reuse the caller's API key for the follow-up task-status requests.
	apiKey := c.Request.Header.Get("Authorization")
	apiKey = strings.TrimPrefix(apiKey, "Bearer ")
	responseFormat := c.GetString("response_format")

	var aliTaskResponse TaskResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &aliTaskResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}

	// A non-empty Message on submission indicates an upstream error.
	if aliTaskResponse.Message != "" {
		logger.SysError("aliAsyncTask err: " + string(responseBody))
		return openai.ErrorWrapper(errors.New(aliTaskResponse.Message), "ali_async_task_failed", http.StatusInternalServerError), nil
	}

	// Block until the async task reaches a terminal status (or polling times out).
	aliResponse, _, err := asyncTaskWait(aliTaskResponse.Output.TaskId, apiKey)
	if err != nil {
		return openai.ErrorWrapper(err, "ali_async_task_wait_failed", http.StatusInternalServerError), nil
	}

	if aliResponse.Output.TaskStatus != "SUCCEEDED" {
		// Surface the upstream failure details with the original status code.
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: aliResponse.Output.Message,
				Type:    "ali_error",
				Param:   "",
				Code:    aliResponse.Output.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}

	fullTextResponse := responseAli2OpenAIImage(aliResponse, responseFormat)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	// NOTE(review): the Write error above is ignored — presumably acceptable
	// once the response is committed, but worth confirming.
	return nil, nil
}
 | 
			
		||||
 | 
			
		||||
func asyncTask(taskID string, key string) (*TaskResponse, error, []byte) {
 | 
			
		||||
	url := fmt.Sprintf("https://dashscope.aliyuncs.com/api/v1/tasks/%s", taskID)
 | 
			
		||||
 | 
			
		||||
	var aliResponse TaskResponse
 | 
			
		||||
 | 
			
		||||
	req, err := http.NewRequest("GET", url, nil)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return &aliResponse, err, nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	req.Header.Set("Authorization", "Bearer "+key)
 | 
			
		||||
 | 
			
		||||
	client := &http.Client{}
 | 
			
		||||
	resp, err := client.Do(req)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		logger.SysError("aliAsyncTask client.Do err: " + err.Error())
 | 
			
		||||
		return &aliResponse, err, nil
 | 
			
		||||
	}
 | 
			
		||||
	defer resp.Body.Close()
 | 
			
		||||
 | 
			
		||||
	responseBody, err := io.ReadAll(resp.Body)
 | 
			
		||||
 | 
			
		||||
	var response TaskResponse
 | 
			
		||||
	err = json.Unmarshal(responseBody, &response)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		logger.SysError("aliAsyncTask NewDecoder err: " + err.Error())
 | 
			
		||||
		return &aliResponse, err, nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return &response, nil, responseBody
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func asyncTaskWait(taskID string, key string) (*TaskResponse, []byte, error) {
 | 
			
		||||
	waitSeconds := 2
 | 
			
		||||
	step := 0
 | 
			
		||||
	maxStep := 20
 | 
			
		||||
 | 
			
		||||
	var taskResponse TaskResponse
 | 
			
		||||
	var responseBody []byte
 | 
			
		||||
 | 
			
		||||
	for {
 | 
			
		||||
		step++
 | 
			
		||||
		rsp, err, body := asyncTask(taskID, key)
 | 
			
		||||
		responseBody = body
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return &taskResponse, responseBody, err
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		if rsp.Output.TaskStatus == "" {
 | 
			
		||||
			return &taskResponse, responseBody, nil
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		switch rsp.Output.TaskStatus {
 | 
			
		||||
		case "FAILED":
 | 
			
		||||
			fallthrough
 | 
			
		||||
		case "CANCELED":
 | 
			
		||||
			fallthrough
 | 
			
		||||
		case "SUCCEEDED":
 | 
			
		||||
			fallthrough
 | 
			
		||||
		case "UNKNOWN":
 | 
			
		||||
			return rsp, responseBody, nil
 | 
			
		||||
		}
 | 
			
		||||
		if step >= maxStep {
 | 
			
		||||
			break
 | 
			
		||||
		}
 | 
			
		||||
		time.Sleep(time.Duration(waitSeconds) * time.Second)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return nil, nil, fmt.Errorf("aliAsyncTaskWait timeout")
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// responseAli2OpenAIImage converts a completed DashScope image task into an
// OpenAI-style ImageResponse. When responseFormat is "b64_json" the image is
// downloaded from data.Url and base64-encoded; results whose download fails
// are logged and skipped. Otherwise data.B64Image is passed through as-is.
func responseAli2OpenAIImage(response *TaskResponse, responseFormat string) *openai.ImageResponse {
	imageResponse := openai.ImageResponse{
		Created: helper.GetTimestamp(),
	}

	for _, data := range response.Output.Results {
		var b64Json string
		if responseFormat == "b64_json" {
			// Fetch the image bytes from data.Url and store them in b64Json.
			imageData, err := getImageData(data.Url)
			if err != nil {
				// Could not fetch the image data: log and skip this result.
				logger.SysError("getImageData Error getting image data: " + err.Error())
				continue
			}

			// Encode the image bytes as a base64 string.
			b64Json = Base64Encode(imageData)
		} else {
			// responseFormat is not "b64_json": use data.B64Image directly.
			b64Json = data.B64Image
		}

		imageResponse.Data = append(imageResponse.Data, openai.ImageData{
			Url:           data.Url,
			B64Json:       b64Json,
			RevisedPrompt: "",
		})
	}
	return &imageResponse
}
 | 
			
		||||
 | 
			
		||||
// getImageData downloads the resource at url and returns its raw bytes.
func getImageData(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	body, readErr := io.ReadAll(resp.Body)
	if readErr != nil {
		return nil, readErr
	}
	return body, nil
}
 | 
			
		||||
 | 
			
		||||
// Base64Encode returns the standard base64 encoding of data.
func Base64Encode(data []byte) string {
	return base64.StdEncoding.EncodeToString(data)
}
 | 
			
		||||
@@ -66,6 +66,17 @@ func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingReque
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func ConvertImageRequest(request model.ImageRequest) *ImageRequest {
 | 
			
		||||
	var imageRequest ImageRequest
 | 
			
		||||
	imageRequest.Input.Prompt = request.Prompt
 | 
			
		||||
	imageRequest.Model = request.Model
 | 
			
		||||
	imageRequest.Parameters.Size = strings.Replace(request.Size, "x", "*", -1)
 | 
			
		||||
	imageRequest.Parameters.N = request.N
 | 
			
		||||
	imageRequest.ResponseFormat = request.ResponseFormat
 | 
			
		||||
 | 
			
		||||
	return &imageRequest
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 | 
			
		||||
	var aliResponse EmbeddingResponse
 | 
			
		||||
	err := json.NewDecoder(resp.Body).Decode(&aliResponse)
 | 
			
		||||
 
 | 
			
		||||
@@ -33,6 +33,79 @@ type ChatRequest struct {
 | 
			
		||||
	Parameters Parameters `json:"parameters,omitempty"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ImageRequest is the DashScope text-to-image request payload.
type ImageRequest struct {
	Model string `json:"model"`
	Input struct {
		Prompt         string `json:"prompt"` // generation prompt
		NegativePrompt string `json:"negative_prompt,omitempty"`
	} `json:"input"`
	Parameters struct {
		Size  string `json:"size,omitempty"` // e.g. "1024*1024" — DashScope uses '*' as separator
		N     int    `json:"n,omitempty"`    // number of images to generate
		Steps string `json:"steps,omitempty"`
		Scale string `json:"scale,omitempty"`
	} `json:"parameters,omitempty"`
	ResponseFormat string `json:"response_format,omitempty"`
}
 | 
			
		||||
 | 
			
		||||
// TaskResponse models a DashScope async task submission/status response,
// including per-result image data and aggregate task metrics.
type TaskResponse struct {
	StatusCode int    `json:"status_code,omitempty"`
	RequestId  string `json:"request_id,omitempty"`
	Code       string `json:"code,omitempty"`
	Message    string `json:"message,omitempty"` // non-empty indicates an upstream error
	Output     struct {
		TaskId     string `json:"task_id,omitempty"`
		TaskStatus string `json:"task_status,omitempty"` // terminal values: SUCCEEDED / FAILED / CANCELED / UNKNOWN
		Code       string `json:"code,omitempty"`
		Message    string `json:"message,omitempty"`
		Results    []struct {
			B64Image string `json:"b64_image,omitempty"`
			Url      string `json:"url,omitempty"`
			Code     string `json:"code,omitempty"`
			Message  string `json:"message,omitempty"`
		} `json:"results,omitempty"`
		TaskMetrics struct {
			Total     int `json:"TOTAL,omitempty"`
			Succeeded int `json:"SUCCEEDED,omitempty"`
			Failed    int `json:"FAILED,omitempty"`
		} `json:"task_metrics,omitempty"`
	} `json:"output,omitempty"`
	Usage Usage `json:"usage"`
}
 | 
			
		||||
 | 
			
		||||
// Header carries the control metadata of a WSS message.
type Header struct {
	Action       string `json:"action,omitempty"`
	Streaming    string `json:"streaming,omitempty"`
	TaskID       string `json:"task_id,omitempty"`
	Event        string `json:"event,omitempty"`
	ErrorCode    string `json:"error_code,omitempty"`
	ErrorMessage string `json:"error_message,omitempty"`
	Attributes   any    `json:"attributes,omitempty"`
}

// Payload carries the task description and data of a WSS message.
type Payload struct {
	Model      string `json:"model,omitempty"`
	Task       string `json:"task,omitempty"`
	TaskGroup  string `json:"task_group,omitempty"`
	Function   string `json:"function,omitempty"`
	Parameters struct {
		SampleRate int     `json:"sample_rate,omitempty"`
		Rate       float64 `json:"rate,omitempty"`
		Format     string  `json:"format,omitempty"`
	} `json:"parameters,omitempty"`
	Input struct {
		Text string `json:"text,omitempty"`
	} `json:"input,omitempty"`
	Usage struct {
		Characters int `json:"characters,omitempty"`
	} `json:"usage,omitempty"`
}

// WSSMessage pairs a Header with a Payload; the name suggests a WebSocket
// (WSS) streaming message — TODO confirm against the code that sends it.
type WSSMessage struct {
	Header  Header  `json:"header,omitempty"`
	Payload Payload `json:"payload,omitempty"`
}
 | 
			
		||||
 | 
			
		||||
type EmbeddingRequest struct {
 | 
			
		||||
	Model string `json:"model"`
 | 
			
		||||
	Input struct {
 | 
			
		||||
 
 | 
			
		||||
@@ -41,6 +41,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return ConvertRequest(*request), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest checks the incoming image request for nil; this
// channel performs no transformation and forwards the request unchanged.
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -44,17 +44,25 @@ func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
 | 
			
		||||
		suffix += "eb-instant"
 | 
			
		||||
	case "ERNIE-Speed":
 | 
			
		||||
		suffix += "ernie_speed"
 | 
			
		||||
	case "ERNIE-Bot-8K":
 | 
			
		||||
		suffix += "ernie_bot_8k"
 | 
			
		||||
	case "ERNIE-4.0-8K":
 | 
			
		||||
		suffix += "completions_pro"
 | 
			
		||||
	case "ERNIE-3.5-8K":
 | 
			
		||||
		suffix += "completions"
 | 
			
		||||
	case "ERNIE-3.5-8K-0205":
 | 
			
		||||
		suffix += "ernie-3.5-8k-0205"
 | 
			
		||||
	case "ERNIE-3.5-8K-1222":
 | 
			
		||||
		suffix += "ernie-3.5-8k-1222"
 | 
			
		||||
	case "ERNIE-Bot-8K":
 | 
			
		||||
		suffix += "ernie_bot_8k"
 | 
			
		||||
	case "ERNIE-3.5-4K-0205":
 | 
			
		||||
		suffix += "ernie-3.5-4k-0205"
 | 
			
		||||
	case "ERNIE-Speed-8K":
 | 
			
		||||
		suffix += "ernie_speed"
 | 
			
		||||
	case "ERNIE-Speed-128K":
 | 
			
		||||
		suffix += "ernie-speed-128k"
 | 
			
		||||
	case "ERNIE-Lite-8K":
 | 
			
		||||
	case "ERNIE-Lite-8K-0922":
 | 
			
		||||
		suffix += "eb-instant"
 | 
			
		||||
	case "ERNIE-Lite-8K-0308":
 | 
			
		||||
		suffix += "ernie-lite-8k"
 | 
			
		||||
	case "ERNIE-Tiny-8K":
 | 
			
		||||
		suffix += "ernie-tiny-8k"
 | 
			
		||||
@@ -101,6 +109,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest checks the incoming image request for nil; this
// channel performs no transformation and forwards the request unchanged.
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -2,15 +2,15 @@ package baidu
 | 
			
		||||
 | 
			
		||||
var ModelList = []string{
 | 
			
		||||
	"ERNIE-4.0-8K",
 | 
			
		||||
	"ERNIE-Bot-8K-0922",
 | 
			
		||||
	"ERNIE-3.5-8K",
 | 
			
		||||
	"ERNIE-Lite-8K-0922",
 | 
			
		||||
	"ERNIE-Speed-8K",
 | 
			
		||||
	"ERNIE-3.5-4K-0205",
 | 
			
		||||
	"ERNIE-3.5-8K-0205",
 | 
			
		||||
	"ERNIE-3.5-8K-1222",
 | 
			
		||||
	"ERNIE-Lite-8K",
 | 
			
		||||
	"ERNIE-Bot-8K",
 | 
			
		||||
	"ERNIE-3.5-4K-0205",
 | 
			
		||||
	"ERNIE-Speed-8K",
 | 
			
		||||
	"ERNIE-Speed-128K",
 | 
			
		||||
	"ERNIE-Lite-8K-0922",
 | 
			
		||||
	"ERNIE-Lite-8K-0308",
 | 
			
		||||
	"ERNIE-Tiny-8K",
 | 
			
		||||
	"BLOOMZ-7B",
 | 
			
		||||
	"Embedding-V1",
 | 
			
		||||
 
 | 
			
		||||
@@ -42,6 +42,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return ConvertRequest(*request), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest checks the incoming image request for nil; this
// channel performs no transformation and forwards the request unchanged.
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channelhelper.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -13,6 +13,7 @@ type Adaptor interface {
 | 
			
		||||
	GetRequestURL(meta *util.RelayMeta) (string, error)
 | 
			
		||||
	SetupRequestHeader(c *gin.Context, req *http.Request, meta *util.RelayMeta) error
 | 
			
		||||
	ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error)
 | 
			
		||||
	ConvertImageRequest(request *model.ImageRequest) (any, error)
 | 
			
		||||
	DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error)
 | 
			
		||||
	DoResponse(c *gin.Context, resp *http.Response, meta *util.RelayMeta) (usage *model.Usage, err *model.ErrorWithStatusCode)
 | 
			
		||||
	GetModelList() []string
 | 
			
		||||
 
 | 
			
		||||
@@ -48,6 +48,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -7,6 +7,7 @@ import (
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/channel"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/channel/minimax"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/constant"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/util"
 | 
			
		||||
	"io"
 | 
			
		||||
@@ -25,6 +26,13 @@ func (a *Adaptor) Init(meta *util.RelayMeta) {
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
 | 
			
		||||
	switch meta.ChannelType {
 | 
			
		||||
	case common.ChannelTypeAzure:
 | 
			
		||||
		if meta.Mode == constant.RelayModeImagesGenerations {
 | 
			
		||||
			// https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
 | 
			
		||||
			// https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
 | 
			
		||||
			fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.APIVersion)
 | 
			
		||||
			return fullRequestURL, nil
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
 | 
			
		||||
		requestURL := strings.Split(meta.RequestURLPath, "?")[0]
 | 
			
		||||
		requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.APIVersion)
 | 
			
		||||
@@ -63,6 +71,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
@@ -75,8 +90,13 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.Rel
 | 
			
		||||
			usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
 | 
			
		||||
		}
 | 
			
		||||
	} else {
 | 
			
		||||
		switch meta.Mode {
 | 
			
		||||
		case constant.RelayModeImagesGenerations:
 | 
			
		||||
			err, _ = ImageHandler(c, resp)
 | 
			
		||||
		default:
 | 
			
		||||
			err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	return
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -149,3 +149,37 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 | 
			
		||||
	}
 | 
			
		||||
	return nil, &textResponse.Usage
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func ImageHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 | 
			
		||||
	var imageResponse ImageResponse
 | 
			
		||||
	responseBody, err := io.ReadAll(resp.Body)
 | 
			
		||||
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	err = resp.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	err = json.Unmarshal(responseBody, &imageResponse)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
 | 
			
		||||
 | 
			
		||||
	for k, v := range resp.Header {
 | 
			
		||||
		c.Writer.Header().Set(k, v[0])
 | 
			
		||||
	}
 | 
			
		||||
	c.Writer.WriteHeader(resp.StatusCode)
 | 
			
		||||
 | 
			
		||||
	_, err = io.Copy(c.Writer, resp.Body)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return ErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	err = resp.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	return nil, nil
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -110,11 +110,16 @@ type EmbeddingResponse struct {
 | 
			
		||||
	model.Usage `json:"usage"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type ImageResponse struct {
 | 
			
		||||
	Created int `json:"created"`
 | 
			
		||||
	Data    []struct {
 | 
			
		||||
		Url string `json:"url"`
 | 
			
		||||
type ImageData struct {
 | 
			
		||||
	Url           string `json:"url,omitempty"`
 | 
			
		||||
	B64Json       string `json:"b64_json,omitempty"`
 | 
			
		||||
	RevisedPrompt string `json:"revised_prompt,omitempty"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type ImageResponse struct {
 | 
			
		||||
	Created int64       `json:"created"`
 | 
			
		||||
	Data    []ImageData `json:"data"`
 | 
			
		||||
	//model.Usage `json:"usage"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type ChatCompletionsStreamResponseChoice struct {
 | 
			
		||||
 
 | 
			
		||||
@@ -36,6 +36,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return ConvertRequest(*request), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -52,6 +52,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return tencentRequest, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -38,6 +38,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	return nil, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	// xunfei's request is not http request, so we don't need to do anything here
 | 
			
		||||
	dummyResp := &http.Response{}
 | 
			
		||||
 
 | 
			
		||||
@@ -77,6 +77,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return request, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return channel.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -11,14 +11,31 @@ var DalleSizeRatios = map[string]map[string]float64{
 | 
			
		||||
		"1024x1792": 2,
 | 
			
		||||
		"1792x1024": 2,
 | 
			
		||||
	},
 | 
			
		||||
	"stable-diffusion-xl": {
 | 
			
		||||
		"512x1024":  1,
 | 
			
		||||
		"1024x768":  1,
 | 
			
		||||
		"1024x1024": 1,
 | 
			
		||||
		"576x1024":  1,
 | 
			
		||||
		"1024x576":  1,
 | 
			
		||||
	},
 | 
			
		||||
	"wanx-v1": {
 | 
			
		||||
		"1024x1024": 1,
 | 
			
		||||
		"720x1280":  1,
 | 
			
		||||
		"1280x720":  1,
 | 
			
		||||
	},
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
var DalleGenerationImageAmounts = map[string][2]int{
 | 
			
		||||
	"dall-e-2":            {1, 10},
 | 
			
		||||
	"dall-e-3":            {1, 1}, // OpenAI allows n=1 currently.
 | 
			
		||||
	"stable-diffusion-xl": {1, 4}, // Ali
 | 
			
		||||
	"wanx-v1":             {1, 4}, // Ali
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
var DalleImagePromptLengthLimitations = map[string]int{
 | 
			
		||||
	"dall-e-2":            1000,
 | 
			
		||||
	"dall-e-3":            4000,
 | 
			
		||||
	"stable-diffusion-xl": 4000,
 | 
			
		||||
	"wanx-v1":             4000,
 | 
			
		||||
	"cogview-3":           833,
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -36,8 +36,8 @@ func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.Gener
 | 
			
		||||
	return textRequest, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func getImageRequest(c *gin.Context, relayMode int) (*openai.ImageRequest, error) {
 | 
			
		||||
	imageRequest := &openai.ImageRequest{}
 | 
			
		||||
func getImageRequest(c *gin.Context, relayMode int) (*relaymodel.ImageRequest, error) {
 | 
			
		||||
	imageRequest := &relaymodel.ImageRequest{}
 | 
			
		||||
	err := common.UnmarshalBodyReusable(c, imageRequest)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return nil, err
 | 
			
		||||
@@ -54,7 +54,7 @@ func getImageRequest(c *gin.Context, relayMode int) (*openai.ImageRequest, error
 | 
			
		||||
	return imageRequest, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func validateImageRequest(imageRequest *openai.ImageRequest, meta *util.RelayMeta) *relaymodel.ErrorWithStatusCode {
 | 
			
		||||
func validateImageRequest(imageRequest *relaymodel.ImageRequest, meta *util.RelayMeta) *relaymodel.ErrorWithStatusCode {
 | 
			
		||||
	// model validation
 | 
			
		||||
	_, hasValidSize := constant.DalleSizeRatios[imageRequest.Model][imageRequest.Size]
 | 
			
		||||
	if !hasValidSize {
 | 
			
		||||
@@ -77,7 +77,7 @@ func validateImageRequest(imageRequest *openai.ImageRequest, meta *util.RelayMet
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func getImageCostRatio(imageRequest *openai.ImageRequest) (float64, error) {
 | 
			
		||||
func getImageCostRatio(imageRequest *relaymodel.ImageRequest) (float64, error) {
 | 
			
		||||
	if imageRequest == nil {
 | 
			
		||||
		return 0, errors.New("imageRequest is nil")
 | 
			
		||||
	}
 | 
			
		||||
 
 | 
			
		||||
@@ -6,18 +6,17 @@ import (
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/model"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/channel/openai"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/constant"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/helper"
 | 
			
		||||
	relaymodel "github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/util"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func isWithinRange(element string, value int) bool {
 | 
			
		||||
@@ -56,15 +55,6 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 | 
			
		||||
		return openai.ErrorWrapper(err, "get_image_cost_ratio_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	requestURL := c.Request.URL.String()
 | 
			
		||||
	fullRequestURL := util.GetFullRequestURL(meta.BaseURL, requestURL, meta.ChannelType)
 | 
			
		||||
	if meta.ChannelType == common.ChannelTypeAzure {
 | 
			
		||||
		// https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
 | 
			
		||||
		apiVersion := util.GetAzureAPIVersion(c)
 | 
			
		||||
		// https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
 | 
			
		||||
		fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, imageRequest.Model, apiVersion)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	var requestBody io.Reader
 | 
			
		||||
	if isModelMapped || meta.ChannelType == common.ChannelTypeAzure { // make Azure channel request body
 | 
			
		||||
		jsonStr, err := json.Marshal(imageRequest)
 | 
			
		||||
@@ -76,6 +66,29 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 | 
			
		||||
		requestBody = c.Request.Body
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	adaptor := helper.GetAdaptor(meta.APIType)
 | 
			
		||||
	if adaptor == nil {
 | 
			
		||||
		return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	switch meta.ChannelType {
 | 
			
		||||
	case common.ChannelTypeAli:
 | 
			
		||||
		fallthrough
 | 
			
		||||
	case common.ChannelTypeBaidu:
 | 
			
		||||
		fallthrough
 | 
			
		||||
	case common.ChannelTypeZhipu:
 | 
			
		||||
		finalRequest, err := adaptor.ConvertImageRequest(imageRequest)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return openai.ErrorWrapper(err, "convert_image_request_failed", http.StatusInternalServerError)
 | 
			
		||||
		}
 | 
			
		||||
 | 
			
		||||
		jsonStr, err := json.Marshal(finalRequest)
 | 
			
		||||
		if err != nil {
 | 
			
		||||
			return openai.ErrorWrapper(err, "marshal_image_request_failed", http.StatusInternalServerError)
 | 
			
		||||
		}
 | 
			
		||||
		requestBody = bytes.NewBuffer(jsonStr)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	modelRatio := common.GetModelRatio(imageRequest.Model)
 | 
			
		||||
	groupRatio := common.GetGroupRatio(meta.Group)
 | 
			
		||||
	ratio := modelRatio * groupRatio
 | 
			
		||||
@@ -87,36 +100,13 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 | 
			
		||||
		return openai.ErrorWrapper(errors.New("user quota is not enough"), "insufficient_user_quota", http.StatusForbidden)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	token := c.Request.Header.Get("Authorization")
 | 
			
		||||
	if meta.ChannelType == common.ChannelTypeAzure { // Azure authentication
 | 
			
		||||
		token = strings.TrimPrefix(token, "Bearer ")
 | 
			
		||||
		req.Header.Set("api-key", token)
 | 
			
		||||
	} else {
 | 
			
		||||
		req.Header.Set("Authorization", token)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
 | 
			
		||||
	req.Header.Set("Accept", c.Request.Header.Get("Accept"))
 | 
			
		||||
 | 
			
		||||
	resp, err := util.HTTPClient.Do(req)
 | 
			
		||||
	// do request
 | 
			
		||||
	resp, err := adaptor.DoRequest(c, meta, requestBody)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
 | 
			
		||||
		return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	err = req.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "close_request_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	err = c.Request.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "close_request_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	var imageResponse openai.ImageResponse
 | 
			
		||||
 | 
			
		||||
	defer func(ctx context.Context) {
 | 
			
		||||
		if resp.StatusCode != http.StatusOK {
 | 
			
		||||
			return
 | 
			
		||||
@@ -139,34 +129,12 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 | 
			
		||||
		}
 | 
			
		||||
	}(c.Request.Context())
 | 
			
		||||
 | 
			
		||||
	responseBody, err := io.ReadAll(resp.Body)
 | 
			
		||||
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	err = resp.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	err = json.Unmarshal(responseBody, &imageResponse)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	// do response
 | 
			
		||||
	_, respErr := adaptor.DoResponse(c, resp, meta)
 | 
			
		||||
	if respErr != nil {
 | 
			
		||||
		logger.Errorf(ctx, "respErr is not nil: %+v", respErr)
 | 
			
		||||
		return respErr
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
 | 
			
		||||
 | 
			
		||||
	for k, v := range resp.Header {
 | 
			
		||||
		c.Writer.Header().Set(k, v[0])
 | 
			
		||||
	}
 | 
			
		||||
	c.Writer.WriteHeader(resp.StatusCode)
 | 
			
		||||
 | 
			
		||||
	_, err = io.Copy(c.Writer, resp.Body)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	err = resp.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
 | 
			
		||||
	}
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										12
									
								
								relay/model/image.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								relay/model/image.go
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,12 @@
 | 
			
		||||
package model
 | 
			
		||||
 | 
			
		||||
type ImageRequest struct {
 | 
			
		||||
	Model          string `json:"model"`
 | 
			
		||||
	Prompt         string `json:"prompt" binding:"required"`
 | 
			
		||||
	N              int    `json:"n,omitempty"`
 | 
			
		||||
	Size           string `json:"size,omitempty"`
 | 
			
		||||
	Quality        string `json:"quality,omitempty"`
 | 
			
		||||
	ResponseFormat string `json:"response_format,omitempty"`
 | 
			
		||||
	Style          string `json:"style,omitempty"`
 | 
			
		||||
	User           string `json:"user,omitempty"`
 | 
			
		||||
}
 | 
			
		||||
@@ -72,7 +72,7 @@ func SetApiRouter(router *gin.Engine) {
 | 
			
		||||
		{
 | 
			
		||||
			channelRoute.GET("/", controller.GetAllChannels)
 | 
			
		||||
			channelRoute.GET("/search", controller.SearchChannels)
 | 
			
		||||
			channelRoute.GET("/models", controller.ListModels)
 | 
			
		||||
			channelRoute.GET("/models", controller.ListAllModels)
 | 
			
		||||
			channelRoute.GET("/:id", controller.GetChannel)
 | 
			
		||||
			channelRoute.GET("/test", controller.TestChannels)
 | 
			
		||||
			channelRoute.GET("/test/:id", controller.TestChannel)
 | 
			
		||||
 
 | 
			
		||||
@@ -18,7 +18,7 @@ export const snackbarConstants = {
 | 
			
		||||
    },
 | 
			
		||||
    NOTICE: {
 | 
			
		||||
      variant: 'info',
 | 
			
		||||
      autoHideDuration: 20000
 | 
			
		||||
      autoHideDuration: 7000
 | 
			
		||||
    }
 | 
			
		||||
  },
 | 
			
		||||
  Mobile: {
 | 
			
		||||
 
 | 
			
		||||
@@ -51,9 +51,9 @@ export function showError(error) {
 | 
			
		||||
 | 
			
		||||
export function showNotice(message, isHTML = false) {
 | 
			
		||||
  if (isHTML) {
 | 
			
		||||
    enqueueSnackbar(<SnackbarHTMLContent htmlContent={message} />, getSnackbarOptions('INFO'));
 | 
			
		||||
    enqueueSnackbar(<SnackbarHTMLContent htmlContent={message} />, getSnackbarOptions('NOTICE'));
 | 
			
		||||
  } else {
 | 
			
		||||
    enqueueSnackbar(message, getSnackbarOptions('INFO'));
 | 
			
		||||
    enqueueSnackbar(message, getSnackbarOptions('NOTICE'));
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -340,7 +340,9 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
 | 
			
		||||
                    },
 | 
			
		||||
                  }}
 | 
			
		||||
                >
 | 
			
		||||
                  {Object.values(CHANNEL_OPTIONS).map((option) => {
 | 
			
		||||
                  {Object.values(CHANNEL_OPTIONS).sort((a, b) => {
 | 
			
		||||
                    return a.text.localeCompare(b.text)
 | 
			
		||||
                  }).map((option) => {
 | 
			
		||||
                    return (
 | 
			
		||||
                      <MenuItem key={option.value} value={option.value}>
 | 
			
		||||
                        {option.text}
 | 
			
		||||
 
 | 
			
		||||
@@ -103,7 +103,7 @@ const EditModal = ({ open, tokenId, onCancel, onOk }) => {
 | 
			
		||||
          fontSize: "1.125rem",
 | 
			
		||||
        }}
 | 
			
		||||
      >
 | 
			
		||||
        {tokenId ? "编辑Token" : "新建Token"}
 | 
			
		||||
        {tokenId ? "编辑令牌" : "新建令牌"}
 | 
			
		||||
      </DialogTitle>
 | 
			
		||||
      <Divider />
 | 
			
		||||
      <DialogContent>
 | 
			
		||||
 
 | 
			
		||||
@@ -158,7 +158,7 @@ const EditToken = () => {
 | 
			
		||||
            <Form.Input
 | 
			
		||||
              label='IP 限制'
 | 
			
		||||
              name='subnet'
 | 
			
		||||
              placeholder={'请输入允许访问的网段,例如:192.168.0.0/24'}
 | 
			
		||||
              placeholder={'请输入允许访问的网段,例如:192.168.0.0/24,请使用英文逗号分隔多个网段'}
 | 
			
		||||
              onChange={handleInputChange}
 | 
			
		||||
              value={inputs.subnet}
 | 
			
		||||
              autoComplete='new-password'
 | 
			
		||||
 
 | 
			
		||||
		Reference in New Issue
	
	Block a user