Mirror of https://github.com/songquanpeng/one-api.git, synced 2026-02-15 10:24:24 +08:00
Compare commits
6 Commits: v0.6.10-al ... a29aacd063

a29aacd063
04aee84485
c885953c6d
b01f0aff38
e54abc0f89
076ec68989
@@ -5,15 +5,16 @@ COPY ./VERSION .
 COPY ./web .

 WORKDIR /web/default
+RUN npm config set registry https://registry.npmmirror.com
 RUN npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build

 WORKDIR /web/berry
-RUN npm install
+RUN npm install --force
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build

 WORKDIR /web/air
-RUN npm install
+RUN npm install --force
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build

 FROM golang:alpine AS builder2

@@ -22,7 +23,8 @@ RUN apk add --no-cache g++

 ENV GO111MODULE=on \
     CGO_ENABLED=1 \
-    GOOS=linux
+    GOOS=linux \
+    GOPROXY=https://goproxy.cn,direct

 WORKDIR /build
 ADD go.mod go.sum ./
@@ -90,7 +90,6 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
 + [x] [together.ai](https://www.together.ai/)
 + [x] [novita.ai](https://www.novita.ai/)
 + [x] [硅基流动 SiliconCloud](https://siliconflow.cn/siliconcloud)
-+ [x] [xAI](https://x.ai/)
 2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
 3. 支持通过**负载均衡**的方式访问多个渠道。
 4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
@@ -137,23 +137,3 @@ func String2Int(str string) int {
 	}
 	return num
 }
-
-func Float64PtrMax(p *float64, maxValue float64) *float64 {
-	if p == nil {
-		return nil
-	}
-	if *p > maxValue {
-		return &maxValue
-	}
-	return p
-}
-
-func Float64PtrMin(p *float64, minValue float64) *float64 {
-	if p == nil {
-		return nil
-	}
-	if *p < minValue {
-		return &minValue
-	}
-	return p
-}
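The two helpers deleted above clamp an optional float while preserving the distinction between "not set" (nil) and "set to a value". A minimal sketch of how a caller could use the pair; the helper body is copied from the removed lines, while the call site is invented for illustration:

```go
package main

import "fmt"

// Float64PtrMax caps *p at maxValue and passes nil through untouched
// (body copied from the removed helper above).
func Float64PtrMax(p *float64, maxValue float64) *float64 {
	if p == nil {
		return nil
	}
	if *p > maxValue {
		return &maxValue
	}
	return p
}

func main() {
	topP := 1.7
	fmt.Println(*Float64PtrMax(&topP, 1.0)) // 1 (out-of-range value clamped)

	var unset *float64
	fmt.Println(Float64PtrMax(unset, 1.0) == nil) // true ("not set" survives the clamp)
}
```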
@@ -76,9 +76,9 @@ func testChannel(channel *model.Channel, request *relaymodel.GeneralOpenAIReques
 		if len(modelNames) > 0 {
 			modelName = modelNames[0]
 		}
-	}
-	if modelMap != nil && modelMap[modelName] != "" {
-		modelName = modelMap[modelName]
+		if modelMap != nil && modelMap[modelName] != "" {
+			modelName = modelMap[modelName]
+		}
 	}
 	meta.OriginModelName, meta.ActualModelName = request.Model, modelName
 	request.Model = modelName
@@ -9,6 +9,7 @@ import (
 	"github.com/songquanpeng/one-api/relay/model"
 	"io"
 	"net/http"
+	"strings"
 )

 type Adaptor struct {

@@ -20,12 +21,18 @@ func (a *Adaptor) Init(meta *meta.Meta) {
 }

 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
-	return fmt.Sprintf("%s/api/library/ask", meta.BaseURL), nil
+	baseURL := meta.BaseURL
+	if strings.HasSuffix(meta.APIKey, "#vip") {
+		baseURL = "https://apivip.aiproxy.io"
+	}
+	return fmt.Sprintf("%s/api/library/ask", baseURL), nil
 }

 func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
 	adaptor.SetupCommonRequestHeader(c, req, meta)
-	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
+	apiKey := meta.APIKey
+	apiKey = strings.TrimSuffix(apiKey, "#vip")
+	req.Header.Set("Authorization", "Bearer "+apiKey)
 	return nil
 }
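Both hunks above implement one convention: an AI Proxy key ending in #vip switches the request to the VIP host, and the marker is stripped before the key goes upstream. A self-contained sketch of that routing rule (the helper name here is illustrative, not the adaptor's API):

```go
package main

import (
	"fmt"
	"strings"
)

// resolveAIProxyEndpoint mirrors the diff's logic: a "#vip" suffix on the
// key selects the VIP base URL and is never forwarded upstream.
func resolveAIProxyEndpoint(baseURL, apiKey string) (string, string) {
	if strings.HasSuffix(apiKey, "#vip") {
		baseURL = "https://apivip.aiproxy.io"
	}
	return baseURL, strings.TrimSuffix(apiKey, "#vip")
}

func main() {
	url, key := resolveAIProxyEndpoint("https://api.aiproxy.io", "sk-abc123#vip")
	fmt.Println(url) // https://apivip.aiproxy.io
	fmt.Println(key) // sk-abc123
}
```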
@@ -6,6 +6,7 @@ import (
 	"errors"
 	"fmt"
 	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"

@@ -19,7 +20,11 @@ import (
 func ImageHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 	apiKey := c.Request.Header.Get("Authorization")
 	apiKey = strings.TrimPrefix(apiKey, "Bearer ")
-	responseFormat := c.GetString("response_format")
+	var responseFormat string
+	if req, exists := c.Get(ctxkey.ConvertedRequest); exists {
+		responseFormat = req.(*ImageRequest).ResponseFormat
+	}
+
 	var aliTaskResponse TaskResponse
 	responseBody, err := io.ReadAll(resp.Body)
@@ -36,7 +36,9 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
 		enableSearch = true
 		aliModel = strings.TrimSuffix(aliModel, EnableSearchModelSuffix)
 	}
-	request.TopP = helper.Float64PtrMax(request.TopP, 0.9999)
+	if request.TopP >= 1 {
+		request.TopP = 0.9999
+	}
 	return &ChatRequest{
 		Model: aliModel,
 		Input: Input{

@@ -70,7 +72,7 @@ func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingReque
 func ConvertImageRequest(request model.ImageRequest) *ImageRequest {
 	var imageRequest ImageRequest
 	imageRequest.Input.Prompt = request.Prompt
-	imageRequest.Model = request.Model
+	imageRequest.Model = strings.TrimPrefix(request.Model, "ali-")
 	imageRequest.Parameters.Size = strings.Replace(request.Size, "x", "*", -1)
 	imageRequest.Parameters.N = request.N
 	imageRequest.ResponseFormat = request.ResponseFormat
@@ -16,13 +16,13 @@ type Input struct {
 }

 type Parameters struct {
-	TopP              *float64     `json:"top_p,omitempty"`
+	TopP              float64      `json:"top_p,omitempty"`
 	TopK              int          `json:"top_k,omitempty"`
 	Seed              uint64       `json:"seed,omitempty"`
 	EnableSearch      bool         `json:"enable_search,omitempty"`
 	IncrementalOutput bool         `json:"incremental_output,omitempty"`
 	MaxTokens         int          `json:"max_tokens,omitempty"`
-	Temperature       *float64     `json:"temperature,omitempty"`
+	Temperature       float64      `json:"temperature,omitempty"`
 	ResultFormat      string       `json:"result_format,omitempty"`
 	Tools             []model.Tool `json:"tools,omitempty"`
 }
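This pointer-to-value switch for Temperature and TopP recurs across nearly every adaptor in this comparison, and it changes JSON behavior: with a plain float64 and omitempty, an explicit zero is silently dropped from the request body, while a *float64 can distinguish "unset" (nil, omitted) from "set to 0" (sent). A minimal demonstration of the difference:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type valueParams struct {
	Temperature float64 `json:"temperature,omitempty"`
}

type pointerParams struct {
	Temperature *float64 `json:"temperature,omitempty"`
}

func main() {
	// With a value field, temperature 0 is indistinguishable from "not set":
	v, _ := json.Marshal(valueParams{Temperature: 0})
	fmt.Println(string(v)) // {}

	// With a pointer field, an explicit 0 survives serialization:
	zero := 0.0
	p, _ := json.Marshal(pointerParams{Temperature: &zero})
	fmt.Println(string(p)) // {"temperature":0}
}
```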
@@ -3,11 +3,7 @@ package anthropic
 var ModelList = []string{
 	"claude-instant-1.2", "claude-2.0", "claude-2.1",
 	"claude-3-haiku-20240307",
-	"claude-3-5-haiku-20241022",
 	"claude-3-sonnet-20240229",
 	"claude-3-opus-20240229",
 	"claude-3-5-sonnet-20240620",
-	"claude-3-5-sonnet-20241022",
-	"claude-3-5-sonnet-latest",
-	"claude-3-5-haiku-20241022",
 }

@@ -48,8 +48,8 @@ type Request struct {
 	MaxTokens     int      `json:"max_tokens,omitempty"`
 	StopSequences []string `json:"stop_sequences,omitempty"`
 	Stream        bool     `json:"stream,omitempty"`
-	Temperature   *float64 `json:"temperature,omitempty"`
-	TopP          *float64 `json:"top_p,omitempty"`
+	Temperature   float64  `json:"temperature,omitempty"`
+	TopP          float64  `json:"top_p,omitempty"`
 	TopK          int      `json:"top_k,omitempty"`
 	Tools         []Tool   `json:"tools,omitempty"`
 	ToolChoice    any      `json:"tool_choice,omitempty"`
@@ -29,13 +29,10 @@ var AwsModelIDMap = map[string]string{
 	"claude-instant-1.2":         "anthropic.claude-instant-v1",
 	"claude-2.0":                 "anthropic.claude-v2",
 	"claude-2.1":                 "anthropic.claude-v2:1",
-	"claude-3-haiku-20240307":    "anthropic.claude-3-haiku-20240307-v1:0",
 	"claude-3-sonnet-20240229":   "anthropic.claude-3-sonnet-20240229-v1:0",
-	"claude-3-opus-20240229":     "anthropic.claude-3-opus-20240229-v1:0",
 	"claude-3-5-sonnet-20240620": "anthropic.claude-3-5-sonnet-20240620-v1:0",
-	"claude-3-5-sonnet-20241022": "anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"claude-3-5-sonnet-latest":   "anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"claude-3-5-haiku-20241022":  "anthropic.claude-3-5-haiku-20241022-v1:0",
+	"claude-3-opus-20240229":     "anthropic.claude-3-opus-20240229-v1:0",
+	"claude-3-haiku-20240307":    "anthropic.claude-3-haiku-20240307-v1:0",
 }

 func awsModelID(requestModel string) (string, error) {
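The map keys OpenAI-style model names to Bedrock model IDs; awsModelID, whose signature appears above, presumably wraps the lookup with an error for unknown names. Its body is not shown in this diff, so the following is only a guess at the conventional lookup-with-error shape:

```go
package main

import "fmt"

var awsModelIDMap = map[string]string{
	"claude-3-5-sonnet-20240620": "anthropic.claude-3-5-sonnet-20240620-v1:0",
}

// awsModelID resolves a requested model name to its Bedrock model ID,
// returning an error for names the channel does not know (hypothetical body).
func awsModelID(requestModel string) (string, error) {
	if id, ok := awsModelIDMap[requestModel]; ok {
		return id, nil
	}
	return "", fmt.Errorf("model %q not found", requestModel)
}

func main() {
	id, err := awsModelID("claude-3-5-sonnet-20240620")
	fmt.Println(id, err) // anthropic.claude-3-5-sonnet-20240620-v1:0 <nil>
}
```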
@@ -11,8 +11,8 @@ type Request struct {
 	Messages      []anthropic.Message `json:"messages"`
 	System        string              `json:"system,omitempty"`
 	MaxTokens     int                 `json:"max_tokens,omitempty"`
-	Temperature   *float64            `json:"temperature,omitempty"`
-	TopP          *float64            `json:"top_p,omitempty"`
+	Temperature   float64             `json:"temperature,omitempty"`
+	TopP          float64             `json:"top_p,omitempty"`
 	TopK          int                 `json:"top_k,omitempty"`
 	StopSequences []string            `json:"stop_sequences,omitempty"`
 	Tools         []anthropic.Tool    `json:"tools,omitempty"`

@@ -4,10 +4,10 @@ package aws
 //
 // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html
 type Request struct {
 	Prompt      string  `json:"prompt"`
 	MaxGenLen   int     `json:"max_gen_len,omitempty"`
-	Temperature *float64 `json:"temperature,omitempty"`
-	TopP        *float64 `json:"top_p,omitempty"`
+	Temperature float64 `json:"temperature,omitempty"`
+	TopP        float64 `json:"top_p,omitempty"`
 }

 // Response is the response from AWS Llama3
@@ -35,9 +35,9 @@ type Message struct {

 type ChatRequest struct {
 	Messages      []Message `json:"messages"`
-	Temperature   *float64  `json:"temperature,omitempty"`
-	TopP          *float64  `json:"top_p,omitempty"`
-	PenaltyScore  *float64  `json:"penalty_score,omitempty"`
+	Temperature   float64   `json:"temperature,omitempty"`
+	TopP          float64   `json:"top_p,omitempty"`
+	PenaltyScore  float64   `json:"penalty_score,omitempty"`
 	Stream        bool      `json:"stream,omitempty"`
 	System        string    `json:"system,omitempty"`
 	DisableSearch bool      `json:"disable_search,omitempty"`

@@ -9,5 +9,5 @@ type Request struct {
 	Prompt      string  `json:"prompt,omitempty"`
 	Raw         bool    `json:"raw,omitempty"`
 	Stream      bool    `json:"stream,omitempty"`
-	Temperature *float64 `json:"temperature,omitempty"`
+	Temperature float64 `json:"temperature,omitempty"`
 }

@@ -43,7 +43,7 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
 		K:                textRequest.TopK,
 		Stream:           textRequest.Stream,
 		FrequencyPenalty: textRequest.FrequencyPenalty,
-		PresencePenalty:  textRequest.PresencePenalty,
+		PresencePenalty:  textRequest.FrequencyPenalty,
 		Seed:             int(textRequest.Seed),
 	}
 	if cohereRequest.Model == "" {
@@ -10,15 +10,15 @@ type Request struct {
 	PromptTruncation string       `json:"prompt_truncation,omitempty"` // 默认值为"AUTO"
 	Connectors       []Connector  `json:"connectors,omitempty"`
 	Documents        []Document   `json:"documents,omitempty"`
-	Temperature      *float64     `json:"temperature,omitempty"` // 默认值为0.3
+	Temperature      float64      `json:"temperature,omitempty"` // 默认值为0.3
 	MaxTokens        int          `json:"max_tokens,omitempty"`
 	MaxInputTokens   int          `json:"max_input_tokens,omitempty"`
 	K                int          `json:"k,omitempty"` // 默认值为0
-	P                *float64     `json:"p,omitempty"` // 默认值为0.75
+	P                float64      `json:"p,omitempty"` // 默认值为0.75
 	Seed             int          `json:"seed,omitempty"`
 	StopSequences    []string     `json:"stop_sequences,omitempty"`
-	FrequencyPenalty *float64     `json:"frequency_penalty,omitempty"` // 默认值为0.0
-	PresencePenalty  *float64     `json:"presence_penalty,omitempty"`  // 默认值为0.0
+	FrequencyPenalty float64      `json:"frequency_penalty,omitempty"` // 默认值为0.0
+	PresencePenalty  float64      `json:"presence_penalty,omitempty"`  // 默认值为0.0
 	Tools            []Tool       `json:"tools,omitempty"`
 	ToolResults      []ToolResult `json:"tool_results,omitempty"`
 }
@@ -4,12 +4,11 @@ import (
 	"bufio"
 	"encoding/json"
 	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
 	"io"
 	"net/http"
 	"strings"

-	"github.com/songquanpeng/one-api/common/render"
-
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/config"
 	"github.com/songquanpeng/one-api/common/helper"

@@ -29,11 +28,6 @@ const (
 	VisionMaxImageNum = 16
 )

-var mimeTypeMap = map[string]string{
-	"json_object": "application/json",
-	"text":        "text/plain",
-}
-
 // Setting safety to the lowest possible values since Gemini is already powerless enough
 func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 	geminiRequest := ChatRequest{
@@ -62,15 +56,6 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 			MaxOutputTokens: textRequest.MaxTokens,
 		},
 	}
-	if textRequest.ResponseFormat != nil {
-		if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok {
-			geminiRequest.GenerationConfig.ResponseMimeType = mimeType
-		}
-		if textRequest.ResponseFormat.JsonSchema != nil {
-			geminiRequest.GenerationConfig.ResponseSchema = textRequest.ResponseFormat.JsonSchema.Schema
-			geminiRequest.GenerationConfig.ResponseMimeType = mimeTypeMap["json_object"]
-		}
-	}
 	if textRequest.Tools != nil {
 		functions := make([]model.Function, 0, len(textRequest.Tools))
 		for _, tool := range textRequest.Tools {
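The removed block was the bridge from OpenAI's response_format field to Gemini's generation config: json_object and text map to response MIME types, and a supplied JSON schema is forwarded as responseSchema. A standalone sketch of that mapping, reassembled from the removed lines (the function wrapper is a stand-in, not the repository's API):

```go
package main

import "fmt"

var mimeTypeMap = map[string]string{
	"json_object": "application/json",
	"text":        "text/plain",
}

// applyResponseFormat mirrors the removed logic: choose a MIME type for the
// requested format, and forward a JSON schema when one is supplied.
func applyResponseFormat(formatType string, schema any) (mimeType string, outSchema any) {
	if m, ok := mimeTypeMap[formatType]; ok {
		mimeType = m
	}
	if schema != nil {
		outSchema = schema
		mimeType = mimeTypeMap["json_object"] // schema output implies JSON
	}
	return mimeType, outSchema
}

func main() {
	mime, _ := applyResponseFormat("json_object", nil)
	fmt.Println(mime) // application/json
}
```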
@@ -65,12 +65,10 @@ type ChatTools struct {
 }

 type ChatGenerationConfig struct {
-	ResponseMimeType string   `json:"responseMimeType,omitempty"`
-	ResponseSchema   any      `json:"responseSchema,omitempty"`
-	Temperature      *float64 `json:"temperature,omitempty"`
-	TopP             *float64 `json:"topP,omitempty"`
-	TopK             float64  `json:"topK,omitempty"`
-	MaxOutputTokens  int      `json:"maxOutputTokens,omitempty"`
-	CandidateCount   int      `json:"candidateCount,omitempty"`
-	StopSequences    []string `json:"stopSequences,omitempty"`
+	Temperature     float64  `json:"temperature,omitempty"`
+	TopP            float64  `json:"topP,omitempty"`
+	TopK            float64  `json:"topK,omitempty"`
+	MaxOutputTokens int      `json:"maxOutputTokens,omitempty"`
+	CandidateCount  int      `json:"candidateCount,omitempty"`
+	StopSequences   []string `json:"stopSequences,omitempty"`
 }
@@ -4,24 +4,14 @@ package groq

 var ModelList = []string{
 	"gemma-7b-it",
+	"mixtral-8x7b-32768",
+	"llama3-8b-8192",
+	"llama3-70b-8192",
 	"gemma2-9b-it",
+	"llama-3.1-405b-reasoning",
 	"llama-3.1-70b-versatile",
 	"llama-3.1-8b-instant",
-	"llama-3.2-11b-text-preview",
-	"llama-3.2-11b-vision-preview",
-	"llama-3.2-1b-preview",
-	"llama-3.2-3b-preview",
-	"llama-3.2-90b-text-preview",
-	"llama-3.2-90b-vision-preview",
-	"llama-guard-3-8b",
-	"llama3-70b-8192",
-	"llama3-8b-8192",
 	"llama3-groq-70b-8192-tool-use-preview",
 	"llama3-groq-8b-8192-tool-use-preview",
-	"llava-v1.5-7b-4096-preview",
-	"mixtral-8x7b-32768",
-	"distil-whisper-large-v3-en",
 	"whisper-large-v3",
-	"whisper-large-v3-turbo",
 }
@@ -1,14 +1,14 @@
 package ollama

 type Options struct {
 	Seed             int      `json:"seed,omitempty"`
-	Temperature      *float64 `json:"temperature,omitempty"`
+	Temperature      float64  `json:"temperature,omitempty"`
 	TopK             int      `json:"top_k,omitempty"`
-	TopP             *float64 `json:"top_p,omitempty"`
-	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
-	PresencePenalty  *float64 `json:"presence_penalty,omitempty"`
+	TopP             float64  `json:"top_p,omitempty"`
+	FrequencyPenalty float64  `json:"frequency_penalty,omitempty"`
+	PresencePenalty  float64  `json:"presence_penalty,omitempty"`
 	NumPredict       int      `json:"num_predict,omitempty"`
 	NumCtx           int      `json:"num_ctx,omitempty"`
 }

 type Message struct {
@@ -50,6 +50,11 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 		return minimax.GetRequestURL(meta)
 	case channeltype.Doubao:
 		return doubao.GetRequestURL(meta)
+	case channeltype.AIProxy:
+		if strings.HasSuffix(meta.APIKey, "#vip") {
+			return GetFullRequestURL("https://apivip.aiproxy.io", meta.RequestURLPath, meta.ChannelType), nil
+		}
+		fallthrough
 	case channeltype.Novita:
 		return novita.GetRequestURL(meta)
 	default:

@@ -63,7 +68,11 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
 		req.Header.Set("api-key", meta.APIKey)
 		return nil
 	}
-	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
+	apiKey := meta.APIKey
+	if meta.ChannelType == channeltype.AIProxy {
+		apiKey = strings.TrimSuffix(apiKey, "#vip")
+	}
+	req.Header.Set("Authorization", "Bearer "+apiKey)
 	if meta.ChannelType == channeltype.OpenRouter {
 		req.Header.Set("HTTP-Referer", "https://github.com/songquanpeng/one-api")
 		req.Header.Set("X-Title", "One API")
@@ -75,13 +84,6 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 	if request == nil {
 		return nil, errors.New("request is nil")
 	}
-	if request.Stream {
-		// always return usage in stream mode
-		if request.StreamOptions == nil {
-			request.StreamOptions = &model.StreamOptions{}
-		}
-		request.StreamOptions.IncludeUsage = true
-	}
 	return request, nil
 }
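The removed block forced stream_options.include_usage on for every streaming request, so the upstream's final streamed chunk carries token usage that the relay can bill against. A minimal sketch of the request side of that contract (struct names follow the diff; the JSON field names are OpenAI's documented ones):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type StreamOptions struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

type ChatRequest struct {
	Model         string         `json:"model"`
	Stream        bool           `json:"stream,omitempty"`
	StreamOptions *StreamOptions `json:"stream_options,omitempty"`
}

func main() {
	req := ChatRequest{Model: "gpt-4o-mini", Stream: true}
	// The removed code did exactly this before forwarding the request:
	if req.Stream {
		if req.StreamOptions == nil {
			req.StreamOptions = &StreamOptions{}
		}
		req.StreamOptions.IncludeUsage = true
	}
	body, _ := json.Marshal(req)
	fmt.Println(string(body))
	// {"model":"gpt-4o-mini","stream":true,"stream_options":{"include_usage":true}}
}
```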
@@ -11,10 +11,9 @@ import (
 	"github.com/songquanpeng/one-api/relay/adaptor/mistral"
 	"github.com/songquanpeng/one-api/relay/adaptor/moonshot"
 	"github.com/songquanpeng/one-api/relay/adaptor/novita"
-	"github.com/songquanpeng/one-api/relay/adaptor/siliconflow"
 	"github.com/songquanpeng/one-api/relay/adaptor/stepfun"
 	"github.com/songquanpeng/one-api/relay/adaptor/togetherai"
-	"github.com/songquanpeng/one-api/relay/adaptor/xai"
+	"github.com/songquanpeng/one-api/relay/adaptor/siliconflow"
 	"github.com/songquanpeng/one-api/relay/channeltype"
 )

@@ -33,7 +32,6 @@ var CompatibleChannels = []int{
 	channeltype.TogetherAI,
 	channeltype.Novita,
 	channeltype.SiliconFlow,
-	channeltype.XAI,
 }

 func GetCompatibleChannelMeta(channelType int) (string, []string) {

@@ -66,8 +64,6 @@ func GetCompatibleChannelMeta(channelType int) (string, []string) {
 		return "novita", novita.ModelList
 	case channeltype.SiliconFlow:
 		return "siliconflow", siliconflow.ModelList
-	case channeltype.XAI:
-		return "xai", xai.ModelList
 	default:
 		return "openai", ModelList
 	}
@@ -19,11 +19,11 @@ type Prompt struct {
 }

 type ChatRequest struct {
 	Prompt         Prompt   `json:"prompt"`
-	Temperature    *float64 `json:"temperature,omitempty"`
+	Temperature    float64  `json:"temperature,omitempty"`
 	CandidateCount int      `json:"candidateCount,omitempty"`
-	TopP           *float64 `json:"topP,omitempty"`
+	TopP           float64  `json:"topP,omitempty"`
 	TopK           int      `json:"topK,omitempty"`
 }

 type Error struct {

@@ -39,8 +39,8 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
 		Model:       &request.Model,
 		Stream:      &request.Stream,
 		Messages:    messages,
-		TopP:        request.TopP,
-		Temperature: request.Temperature,
+		TopP:        &request.TopP,
+		Temperature: &request.Temperature,
 	}
 }
@@ -13,12 +13,7 @@ import (
 )

 var ModelList = []string{
-	"claude-3-haiku@20240307",
-	"claude-3-sonnet@20240229",
-	"claude-3-opus@20240229",
-	"claude-3-5-sonnet@20240620",
-	"claude-3-5-sonnet-v2@20241022",
-	"claude-3-5-haiku@20241022",
+	"claude-3-haiku@20240307", "claude-3-opus@20240229", "claude-3-5-sonnet@20240620", "claude-3-sonnet@20240229",
 }

 const anthropicVersion = "vertex-2023-10-16"

@@ -11,8 +11,8 @@ type Request struct {
 	MaxTokens     int              `json:"max_tokens,omitempty"`
 	StopSequences []string         `json:"stop_sequences,omitempty"`
 	Stream        bool             `json:"stream,omitempty"`
-	Temperature   *float64         `json:"temperature,omitempty"`
-	TopP          *float64         `json:"top_p,omitempty"`
+	Temperature   float64          `json:"temperature,omitempty"`
+	TopP          float64          `json:"top_p,omitempty"`
 	TopK          int              `json:"top_k,omitempty"`
 	Tools         []anthropic.Tool `json:"tools,omitempty"`
 	ToolChoice    any              `json:"tool_choice,omitempty"`

@@ -15,7 +15,7 @@ import (
 )

 var ModelList = []string{
-	"gemini-1.5-pro-001", "gemini-1.5-flash-001", "gemini-pro", "gemini-pro-vision", "gemini-1.5-pro-002", "gemini-1.5-flash-002",
+	"gemini-1.5-pro-001", "gemini-1.5-flash-001", "gemini-pro", "gemini-pro-vision",
 }

 type Adaptor struct {
@@ -1,5 +0,0 @@
-package xai
-
-var ModelList = []string{
-	"grok-beta",
-}

@@ -7,6 +7,5 @@ var ModelList = []string{
 	"SparkDesk-v3.1",
 	"SparkDesk-v3.1-128K",
 	"SparkDesk-v3.5",
-	"SparkDesk-v3.5-32K",
 	"SparkDesk-v4.0",
 }
@@ -283,7 +283,7 @@ func parseAPIVersionByModelName(modelName string) string {
 func apiVersion2domain(apiVersion string) string {
 	switch apiVersion {
 	case "v1.1":
-		return "lite"
+		return "general"
 	case "v2.1":
 		return "generalv2"
 	case "v3.1":

@@ -292,8 +292,6 @@ func apiVersion2domain(apiVersion string) string {
 		return "pro-128k"
 	case "v3.5":
 		return "generalv3.5"
-	case "v3.5-32K":
-		return "max-32k"
 	case "v4.0":
 		return "4.0Ultra"
 	}

@@ -305,10 +303,7 @@ func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (strin
 	domain := apiVersion2domain(apiVersion)
 	switch apiVersion {
 	case "v3.1-128K":
-		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/pro-128k"), apiKey, apiSecret)
-		break
-	case "v3.5-32K":
-		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/max-32k"), apiKey, apiSecret)
+		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/pro-128k", apiVersion), apiKey, apiSecret)
 		break
 	default:
 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
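Taken together, these hunks map a SparkDesk API version to a billing domain and splice the same version into the WebSocket endpoint path. A condensed sketch of that chain using only the values visible above (post-change side); the fallback return is an assumption, since the real default is not shown:

```go
package main

import "fmt"

// apiVersion2domain condenses the switch from the hunks above.
func apiVersion2domain(apiVersion string) string {
	switch apiVersion {
	case "v1.1":
		return "general"
	case "v2.1":
		return "generalv2"
	case "v3.1-128K":
		return "pro-128k"
	case "v3.5":
		return "generalv3.5"
	case "v4.0":
		return "4.0Ultra"
	}
	return "general" // assumed fallback; the real default is not in this diff
}

// sparkEndpoint builds the WebSocket URL the same way getXunfeiAuthUrl does.
func sparkEndpoint(apiVersion string) string {
	if apiVersion == "v3.1-128K" {
		return fmt.Sprintf("wss://spark-api.xf-yun.com/%s/pro-128k", apiVersion)
	}
	return fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion)
}

func main() {
	fmt.Println(apiVersion2domain("v4.0"), sparkEndpoint("v4.0"))
	// 4.0Ultra wss://spark-api.xf-yun.com/v4.0/chat
}
```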
@@ -19,11 +19,11 @@ type ChatRequest struct {
 	} `json:"header"`
 	Parameter struct {
 		Chat struct {
 			Domain      string   `json:"domain,omitempty"`
-			Temperature *float64 `json:"temperature,omitempty"`
+			Temperature float64  `json:"temperature,omitempty"`
 			TopK        int      `json:"top_k,omitempty"`
 			MaxTokens   int      `json:"max_tokens,omitempty"`
 			Auditing    bool     `json:"auditing,omitempty"`
 		} `json:"chat"`
 	} `json:"parameter"`
 	Payload struct {
@@ -4,13 +4,13 @@ import (
 	"errors"
 	"fmt"
 	"github.com/gin-gonic/gin"
-	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/relay/adaptor"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/meta"
 	"github.com/songquanpeng/one-api/relay/model"
 	"github.com/songquanpeng/one-api/relay/relaymode"
 	"io"
+	"math"
 	"net/http"
 	"strings"
 )

@@ -65,13 +65,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 		baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request)
 		return baiduEmbeddingRequest, err
 	default:
-		// TopP [0.0, 1.0]
-		request.TopP = helper.Float64PtrMax(request.TopP, 1)
-		request.TopP = helper.Float64PtrMin(request.TopP, 0)
+		// TopP (0.0, 1.0)
+		request.TopP = math.Min(0.99, request.TopP)
+		request.TopP = math.Max(0.01, request.TopP)

-		// Temperature [0.0, 1.0]
-		request.Temperature = helper.Float64PtrMax(request.Temperature, 1)
-		request.Temperature = helper.Float64PtrMin(request.Temperature, 0)
+		// Temperature (0.0, 1.0)
+		request.Temperature = math.Min(0.99, request.Temperature)
+		request.Temperature = math.Max(0.01, request.Temperature)
 		a.SetVersionByModeName(request.Model)
 		if a.APIVersion == "v4" {
 			return request, nil
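The replacement clamps both sampling parameters into (0.01, 0.99) with plain math.Min and math.Max, which works on the now value-typed fields. The chaining order matters: Min first caps the top, then Max lifts the bottom. A compact demonstration:

```go
package main

import (
	"fmt"
	"math"
)

// clampOpenUnit pins x into (0, 1) the way the hunk above does,
// with 0.01 and 0.99 as the effective bounds.
func clampOpenUnit(x float64) float64 {
	x = math.Min(0.99, x)
	return math.Max(0.01, x)
}

func main() {
	fmt.Println(clampOpenUnit(1.5))  // 0.99
	fmt.Println(clampOpenUnit(0))    // 0.01 (an "unset" zero gets bumped up)
	fmt.Println(clampOpenUnit(0.42)) // 0.42
}
```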
@@ -12,8 +12,8 @@ type Message struct {

 type Request struct {
 	Prompt      []Message `json:"prompt"`
-	Temperature *float64  `json:"temperature,omitempty"`
-	TopP        *float64  `json:"top_p,omitempty"`
+	Temperature float64   `json:"temperature,omitempty"`
+	TopP        float64   `json:"top_p,omitempty"`
 	RequestId   string    `json:"request_id,omitempty"`
 	Incremental bool      `json:"incremental,omitempty"`
 }

@@ -61,6 +61,4 @@ var ImagePromptLengthLimitations = map[string]int{
 }

 var ImageOriginModelName = map[string]string{
-	"ali-stable-diffusion-xl":   "stable-diffusion-xl",
-	"ali-stable-diffusion-v1.5": "stable-diffusion-v1.5",
 }
@@ -79,10 +79,8 @@ var ModelRatio = map[string]float64{
 	"claude-2.0":                 8.0 / 1000 * USD,
 	"claude-2.1":                 8.0 / 1000 * USD,
 	"claude-3-haiku-20240307":    0.25 / 1000 * USD,
-	"claude-3-5-haiku-20241022":  1.0 / 1000 * USD,
 	"claude-3-sonnet-20240229":   3.0 / 1000 * USD,
 	"claude-3-5-sonnet-20240620": 3.0 / 1000 * USD,
-	"claude-3-5-sonnet-20241022": 3.0 / 1000 * USD,
 	"claude-3-opus-20240229":     15.0 / 1000 * USD,
 	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7
 	"ERNIE-4.0-8K": 0.120 * RMB,

@@ -132,7 +130,6 @@ var ModelRatio = map[string]float64{
 	"SparkDesk-v3.1":      1.2858, // ¥0.018 / 1k tokens
 	"SparkDesk-v3.1-128K": 1.2858, // ¥0.018 / 1k tokens
 	"SparkDesk-v3.5":      1.2858, // ¥0.018 / 1k tokens
-	"SparkDesk-v3.5-32K":  1.2858, // ¥0.018 / 1k tokens
 	"SparkDesk-v4.0":      1.2858, // ¥0.018 / 1k tokens
 	"360GPT_S2_V9":          0.8572, // ¥0.012 / 1k tokens
 	"embedding-bert-512-v1": 0.0715, // ¥0.001 / 1k tokens

@@ -164,21 +161,15 @@ var ModelRatio = map[string]float64{
 	"mistral-embed": 0.1 / 1000 * USD,
 	// https://wow.groq.com/#:~:text=inquiries%C2%A0here.-,Model,-Current%20Speed
 	"gemma-7b-it":        0.07 / 1000000 * USD,
+	"mixtral-8x7b-32768": 0.24 / 1000000 * USD,
+	"llama3-8b-8192":     0.05 / 1000000 * USD,
+	"llama3-70b-8192":    0.59 / 1000000 * USD,
 	"gemma2-9b-it":       0.20 / 1000000 * USD,
+	"llama-3.1-405b-reasoning": 0.89 / 1000000 * USD,
 	"llama-3.1-70b-versatile":  0.59 / 1000000 * USD,
 	"llama-3.1-8b-instant":     0.05 / 1000000 * USD,
-	"llama-3.2-11b-text-preview":   0.05 / 1000000 * USD,
-	"llama-3.2-11b-vision-preview": 0.05 / 1000000 * USD,
-	"llama-3.2-1b-preview":         0.05 / 1000000 * USD,
-	"llama-3.2-3b-preview":         0.05 / 1000000 * USD,
-	"llama-3.2-90b-text-preview":   0.59 / 1000000 * USD,
-	"llama-guard-3-8b":             0.05 / 1000000 * USD,
-	"llama3-70b-8192":              0.59 / 1000000 * USD,
-	"llama3-8b-8192":               0.05 / 1000000 * USD,
 	"llama3-groq-70b-8192-tool-use-preview": 0.89 / 1000000 * USD,
 	"llama3-groq-8b-8192-tool-use-preview":  0.19 / 1000000 * USD,
-	"mixtral-8x7b-32768":                    0.24 / 1000000 * USD,

 	// https://platform.lingyiwanwu.com/docs#-计费单元
 	"yi-34b-chat-0205": 2.5 / 1000 * RMB,
 	"yi-34b-chat-200k": 12.0 / 1000 * RMB,

@@ -209,8 +200,6 @@ var ModelRatio = map[string]float64{
 	"deepl-zh": 25.0 / 1000 * USD,
 	"deepl-en": 25.0 / 1000 * USD,
 	"deepl-ja": 25.0 / 1000 * USD,
-	// https://console.x.ai/
-	"grok-beta": 5.0 / 1000 * USD,
 }

 var CompletionRatio = map[string]float64{

@@ -375,8 +364,6 @@ func GetCompletionRatio(name string, channelType int) float64 {
 		return 3
 	case "command-r-plus":
 		return 5
-	case "grok-beta":
-		return 3
 	}
 	return 1
 }
@@ -46,6 +46,5 @@ const (
 	VertextAI
 	Proxy
 	SiliconFlow
-	XAI
 	Dummy
 )

@@ -45,8 +45,7 @@ var ChannelBaseURLs = []string{
 	"https://api.novita.ai/v3/openai", // 41
 	"",                                // 42
 	"",                                // 43
 	"https://api.siliconflow.cn",      // 44
-	"https://api.x.ai",                // 45
 }

 func init() {
@@ -164,6 +164,7 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 		if err != nil {
 			return openai.ErrorWrapper(err, "marshal_image_request_failed", http.StatusInternalServerError)
 		}
+		c.Set(ctxkey.ConvertedRequest, finalRequest)
 		requestBody = bytes.NewBuffer(jsonStr)
 	}
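This line is the producer half of the change to the Ali image handler earlier in this comparison: the converted request is stashed in the gin context under a typed key, and the response handler later retrieves it to read the response format. A small sketch of the stash-and-retrieve pattern (the key constant and request type stand in for the real ones):

```go
package main

import (
	"fmt"
	"net/http/httptest"

	"github.com/gin-gonic/gin"
)

const convertedRequestKey = "converted_request" // stand-in for ctxkey.ConvertedRequest

type ImageRequest struct {
	ResponseFormat string
}

func main() {
	c, _ := gin.CreateTestContext(httptest.NewRecorder())

	// Producer side (RelayImageHelper): stash the converted request.
	c.Set(convertedRequestKey, &ImageRequest{ResponseFormat: "b64_json"})

	// Consumer side (ImageHandler): the exists / type-assert retrieval idiom.
	var responseFormat string
	if req, exists := c.Get(convertedRequestKey); exists {
		responseFormat = req.(*ImageRequest).ResponseFormat
	}
	fmt.Println(responseFormat) // b64_json
}
```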
@@ -4,13 +4,13 @@ import (
 	"bytes"
 	"encoding/json"
 	"fmt"
+	"github.com/songquanpeng/one-api/relay/adaptor"
 	"io"
 	"net/http"

 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/relay"
-	"github.com/songquanpeng/one-api/relay/adaptor"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/apitype"
 	"github.com/songquanpeng/one-api/relay/billing"

@@ -54,7 +54,6 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
 	}
 	adaptor.Init(meta)

-	// get request body
 	requestBody, err := getRequestBody(c, meta, textRequest, adaptor)
 	if err != nil {
 		return openai.ErrorWrapper(err, "convert_request_failed", http.StatusInternalServerError)

@@ -1,7 +1,6 @@
 package model

 const (
 	ContentTypeText       = "text"
 	ContentTypeImageURL   = "image_url"
-	ContentTypeInputAudio = "input_audio"
 )
@@ -12,59 +12,32 @@ type JSONSchema struct {
 	Strict *bool `json:"strict,omitempty"`
 }

-type Audio struct {
-	Voice  string `json:"voice,omitempty"`
-	Format string `json:"format,omitempty"`
-}
-
-type StreamOptions struct {
-	IncludeUsage bool `json:"include_usage,omitempty"`
-}
-
 type GeneralOpenAIRequest struct {
-	// https://platform.openai.com/docs/api-reference/chat/create
-	Messages            []Message       `json:"messages,omitempty"`
-	Model               string          `json:"model,omitempty"`
-	Store               *bool           `json:"store,omitempty"`
-	Metadata            any             `json:"metadata,omitempty"`
-	FrequencyPenalty    *float64        `json:"frequency_penalty,omitempty"`
-	LogitBias           any             `json:"logit_bias,omitempty"`
-	Logprobs            *bool           `json:"logprobs,omitempty"`
-	TopLogprobs         *int            `json:"top_logprobs,omitempty"`
-	MaxTokens           int             `json:"max_tokens,omitempty"`
-	MaxCompletionTokens *int            `json:"max_completion_tokens,omitempty"`
-	N                   int             `json:"n,omitempty"`
-	Modalities          []string        `json:"modalities,omitempty"`
-	Prediction          any             `json:"prediction,omitempty"`
-	Audio               *Audio          `json:"audio,omitempty"`
-	PresencePenalty     *float64        `json:"presence_penalty,omitempty"`
-	ResponseFormat      *ResponseFormat `json:"response_format,omitempty"`
-	Seed                float64         `json:"seed,omitempty"`
-	ServiceTier         *string         `json:"service_tier,omitempty"`
-	Stop                any             `json:"stop,omitempty"`
-	Stream              bool            `json:"stream,omitempty"`
-	StreamOptions       *StreamOptions  `json:"stream_options,omitempty"`
-	Temperature         *float64        `json:"temperature,omitempty"`
-	TopP                *float64        `json:"top_p,omitempty"`
-	TopK                int             `json:"top_k,omitempty"`
-	Tools               []Tool          `json:"tools,omitempty"`
-	ToolChoice          any             `json:"tool_choice,omitempty"`
-	ParallelTooCalls    *bool           `json:"parallel_tool_calls,omitempty"`
-	User                string          `json:"user,omitempty"`
-	FunctionCall        any             `json:"function_call,omitempty"`
-	Functions           any             `json:"functions,omitempty"`
-	// https://platform.openai.com/docs/api-reference/embeddings/create
-	Input          any    `json:"input,omitempty"`
-	EncodingFormat string `json:"encoding_format,omitempty"`
-	Dimensions     int    `json:"dimensions,omitempty"`
-	// https://platform.openai.com/docs/api-reference/images/create
-	Prompt  any     `json:"prompt,omitempty"`
-	Quality *string `json:"quality,omitempty"`
-	Size    string  `json:"size,omitempty"`
-	Style   *string `json:"style,omitempty"`
-	// Others
-	Instruction string `json:"instruction,omitempty"`
-	NumCtx      int    `json:"num_ctx,omitempty"`
+	Messages         []Message       `json:"messages,omitempty"`
+	Model            string          `json:"model,omitempty"`
+	FrequencyPenalty float64         `json:"frequency_penalty,omitempty"`
+	MaxTokens        int             `json:"max_tokens,omitempty"`
+	N                int             `json:"n,omitempty"`
+	PresencePenalty  float64         `json:"presence_penalty,omitempty"`
+	ResponseFormat   *ResponseFormat `json:"response_format,omitempty"`
+	Seed             float64         `json:"seed,omitempty"`
+	Stop             any             `json:"stop,omitempty"`
+	Stream           bool            `json:"stream,omitempty"`
+	Temperature      float64         `json:"temperature,omitempty"`
+	TopP             float64         `json:"top_p,omitempty"`
+	TopK             int             `json:"top_k,omitempty"`
+	Tools            []Tool          `json:"tools,omitempty"`
+	ToolChoice       any             `json:"tool_choice,omitempty"`
+	FunctionCall     any             `json:"function_call,omitempty"`
+	Functions        any             `json:"functions,omitempty"`
+	User             string          `json:"user,omitempty"`
+	Prompt           any             `json:"prompt,omitempty"`
+	Input            any             `json:"input,omitempty"`
+	EncodingFormat   string          `json:"encoding_format,omitempty"`
+	Dimensions       int             `json:"dimensions,omitempty"`
+	Instruction      string          `json:"instruction,omitempty"`
+	Size             string          `json:"size,omitempty"`
+	NumCtx           int             `json:"num_ctx,omitempty"`
 }

 func (r GeneralOpenAIRequest) ParseInput() []string {
@@ -395,7 +395,7 @@ const TokensTable = () => {
         url = mjLink + `/#/?settings={"key":"sk-${key}","url":"${serverAddress}"}`;
         break;
       case 'lobechat':
-        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}/v1"}}}`;
+        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}"/v1"}}}`;
         break;
       default:
         if (!chatLink) {

@@ -30,7 +30,6 @@ export const CHANNEL_OPTIONS = [
   { key: 42, text: 'VertexAI', value: 42, color: 'blue' },
   { key: 43, text: 'Proxy', value: 43, color: 'blue' },
   { key: 44, text: 'SiliconFlow', value: 44, color: 'blue' },
-  { key: 45, text: 'xAI', value: 45, color: 'blue' },
   { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
   { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
   { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
@@ -63,7 +63,7 @@ const EditChannel = (props) => {
     let localModels = [];
     switch (value) {
       case 14:
-        localModels = ["claude-instant-1.2", "claude-2", "claude-2.0", "claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307", "claude-3-5-haiku-20241022", "claude-3-5-sonnet-20240620", "claude-3-5-sonnet-20241022"];
+        localModels = ["claude-instant-1.2", "claude-2", "claude-2.0", "claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307", "claude-3-5-sonnet-20240620"];
         break;
       case 11:
         localModels = ['PaLM-2'];

@@ -78,7 +78,7 @@ const EditChannel = (props) => {
         localModels = ['chatglm_pro', 'chatglm_std', 'chatglm_lite'];
         break;
       case 18:
-        localModels = ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.1-128K', 'SparkDesk-v3.5', 'SparkDesk-v3.5-32K', 'SparkDesk-v4.0'];
+        localModels = ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.1-128K', 'SparkDesk-v3.5', 'SparkDesk-v4.0'];
         break;
       case 19:
         localModels = ['360GPT_S2_V9', 'embedding-bert-512-v1', 'embedding_s1_v1', 'semantic_similarity_s1_v1'];
@@ -179,12 +179,6 @@ export const CHANNEL_OPTIONS = {
     value: 44,
     color: 'primary'
   },
-  45: {
-    key: 45,
-    text: 'xAI',
-    value: 45,
-    color: 'primary'
-  },
  41: {
     key: 41,
     text: 'Novita',

@@ -91,7 +91,7 @@ const typeConfig = {
     other: '版本号'
   },
   input: {
-    models: ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.1-128K', 'SparkDesk-v3.5', 'SparkDesk-v3.5-32K', 'SparkDesk-v4.0']
+    models: ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.1-128K', 'SparkDesk-v3.5', 'SparkDesk-v4.0']
   },
   prompt: {
     key: '按照如下格式输入:APPID|APISecret|APIKey',

@@ -223,9 +223,6 @@ const typeConfig = {
     },
     modelGroup: 'anthropic'
   },
-  45: {
-    modelGroup: 'xai'
-  },
 };

 export { defaultConfig, typeConfig };
@@ -33,7 +33,7 @@ const COPY_OPTIONS = [
   },
   { key: 'ama', text: 'BotGem', url: 'ama://set-api-key?server={serverAddress}&key=sk-{key}', encode: true },
   { key: 'opencat', text: 'OpenCat', url: 'opencat://team/join?domain={serverAddress}&token=sk-{key}', encode: true },
-  { key: 'lobechat', text: 'LobeChat', url: 'https://lobehub.com/?settings={"keyVaults":{"openai":{"apiKey":"sk-{key}","baseURL":"{serverAddress}"}}}', encode: true }
+  { key: 'lobechat', text: 'LobeChat', url: 'https://lobehub.com/?settings={"keyVaults":{"openai":{"apiKey":"user-key","baseURL":"https://your-proxy.com/v1"}}}', encode: true }
 ];

 function replacePlaceholders(text, key, serverAddress) {
@@ -59,12 +59,6 @@ function renderBalance(type, balance) {
   }
 }

-function isShowDetail() {
-  return localStorage.getItem("show_detail") === "true";
-}
-
-const promptID = "detail"
-
 const ChannelsTable = () => {
   const [channels, setChannels] = useState([]);
   const [loading, setLoading] = useState(true);

@@ -72,8 +66,7 @@ const ChannelsTable = () => {
   const [searchKeyword, setSearchKeyword] = useState('');
   const [searching, setSearching] = useState(false);
   const [updatingBalance, setUpdatingBalance] = useState(false);
-  const [showPrompt, setShowPrompt] = useState(shouldShowPrompt(promptID));
-  const [showDetail, setShowDetail] = useState(isShowDetail());
+  const [showPrompt, setShowPrompt] = useState(shouldShowPrompt("channel-test"));

   const loadChannels = async (startIdx) => {
     const res = await API.get(`/api/channel/?p=${startIdx}`);

@@ -127,11 +120,6 @@ const ChannelsTable = () => {
     await loadChannels(activePage - 1);
   };

-  const toggleShowDetail = () => {
-    setShowDetail(!showDetail);
-    localStorage.setItem("show_detail", (!showDetail).toString());
-  }
-
   useEffect(() => {
     loadChannels(0)
       .then()

@@ -376,13 +364,11 @@ const ChannelsTable = () => {
         showPrompt && (
           <Message onDismiss={() => {
             setShowPrompt(false);
-            setPromptShown(promptID);
+            setPromptShown("channel-test");
           }}>
             OpenAI 渠道已经不再支持通过 key 获取余额,因此余额显示为 0。对于支持的渠道类型,请点击余额进行刷新。
             <br/>
             渠道测试仅支持 chat 模型,优先使用 gpt-3.5-turbo,如果该模型不可用则使用你所配置的模型列表中的第一个模型。
-            <br/>
-            点击下方详情按钮可以显示余额以及设置额外的测试模型。
           </Message>
         )
       }
@@ -442,7 +428,6 @@ const ChannelsTable = () => {
             onClick={() => {
               sortChannel('balance');
             }}
-            hidden={!showDetail}
           >
             余额
           </Table.HeaderCell>

@@ -454,7 +439,7 @@ const ChannelsTable = () => {
           >
             优先级
           </Table.HeaderCell>
-          <Table.HeaderCell hidden={!showDetail}>测试模型</Table.HeaderCell>
+          <Table.HeaderCell>测试模型</Table.HeaderCell>
           <Table.HeaderCell>操作</Table.HeaderCell>
         </Table.Row>
       </Table.Header>

@@ -482,7 +467,7 @@ const ChannelsTable = () => {
                   basic
                 />
               </Table.Cell>
-              <Table.Cell hidden={!showDetail}>
+              <Table.Cell>
                 <Popup
                   trigger={<span onClick={() => {
                     updateChannelBalance(channel.id, channel.name, idx);

@@ -509,7 +494,7 @@ const ChannelsTable = () => {
                   basic
                 />
               </Table.Cell>
-              <Table.Cell hidden={!showDetail}>
+              <Table.Cell>
                 <Dropdown
                   placeholder='请选择测试模型'
                   selection

@@ -588,7 +573,7 @@ const ChannelsTable = () => {

       <Table.Footer>
         <Table.Row>
-          <Table.HeaderCell colSpan={showDetail ? "10" : "8"}>
+          <Table.HeaderCell colSpan='9'>
             <Button size='small' as={Link} to='/channel/add' loading={loading}>
               添加新的渠道
             </Button>

@@ -626,7 +611,6 @@ const ChannelsTable = () => {
             }
           />
           <Button size='small' onClick={refresh} loading={loading}>刷新</Button>
-          <Button size='small' onClick={toggleShowDetail}>{showDetail ? "隐藏详情" : "详情"}</Button>
         </Table.HeaderCell>
       </Table.Row>
     </Table.Footer>
@@ -117,7 +117,7 @@ const TokensTable = () => {
         url = nextUrl;
         break;
       case 'lobechat':
-        url = nextLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}/v1"}}}`;
+        url = nextLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}"/v1"}}}`;
         break;
       default:
         url = `sk-${key}`;

@@ -160,7 +160,7 @@ const TokensTable = () => {
         break;

       case 'lobechat':
-        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}/v1"}}}`;
+        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}"/v1"}}}`;
         break;

       default:

@@ -30,7 +30,6 @@ export const CHANNEL_OPTIONS = [
   { key: 42, text: 'VertexAI', value: 42, color: 'blue' },
   { key: 43, text: 'Proxy', value: 43, color: 'blue' },
   { key: 44, text: 'SiliconFlow', value: 44, color: 'blue' },
-  { key: 45, text: 'xAI', value: 45, color: 'blue' },
   { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
   { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
   { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
@@ -2,7 +2,7 @@ import React from 'react';
 import { Header, Segment } from 'semantic-ui-react';
 import ChannelsTable from '../../components/ChannelsTable';

-const Channel = () => (
+const File = () => (
   <>
     <Segment>
       <Header as='h3'>管理渠道</Header>

@@ -11,4 +11,4 @@ const Channel = () => (
   </>
 );

-export default Channel;
+export default File;