diff --git a/.github/workflows/linux-release.yml b/.github/workflows/linux-release.yml
index 6f30a1d5..dd27b68d 100644
--- a/.github/workflows/linux-release.yml
+++ b/.github/workflows/linux-release.yml
@@ -5,7 +5,7 @@ permissions:
on:
push:
tags:
- - '*'
+ - 'v*.*.*'
- '!*-alpha*'
workflow_dispatch:
inputs:
@@ -44,7 +44,7 @@ jobs:
- name: Build Backend (amd64)
run: |
go mod download
- go build -ldflags "-s -w -X 'github.com/songquanpeng/one-api/common.Version=$(git describe --tags)' -extldflags '-static'" -o one-api
+ go build -ldflags "-s -w -X 'github.com/Laisky/one-api/common.Version=$(git describe --tags)' -extldflags '-static'" -o one-api
- name: Build Backend (arm64)
run: |
@@ -62,4 +62,4 @@ jobs:
draft: true
generate_release_notes: true
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/macos-release.yml b/.github/workflows/macos-release.yml
index 359c2c92..54750307 100644
--- a/.github/workflows/macos-release.yml
+++ b/.github/workflows/macos-release.yml
@@ -5,7 +5,7 @@ permissions:
on:
push:
tags:
- - '*'
+ - 'v*.*.*'
- '!*-alpha*'
workflow_dispatch:
inputs:
@@ -44,7 +44,7 @@ jobs:
- name: Build Backend
run: |
go mod download
- go build -ldflags "-X 'github.com/songquanpeng/one-api/common.Version=$(git describe --tags)'" -o one-api-macos
+ go build -ldflags "-X 'github.com/Laisky/one-api/common.Version=$(git describe --tags)'" -o one-api-macos
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
diff --git a/.github/workflows/windows-release.yml b/.github/workflows/windows-release.yml
index 4e99b75c..bd37e395 100644
--- a/.github/workflows/windows-release.yml
+++ b/.github/workflows/windows-release.yml
@@ -5,7 +5,7 @@ permissions:
on:
push:
tags:
- - '*'
+ - 'v*.*.*'
- '!*-alpha*'
workflow_dispatch:
inputs:
@@ -47,7 +47,7 @@ jobs:
- name: Build Backend
run: |
go mod download
- go build -ldflags "-s -w -X 'github.com/songquanpeng/one-api/common.Version=$(git describe --tags)'" -o one-api.exe
+ go build -ldflags "-s -w -X 'github.com/Laisky/one-api/common.Version=$(git describe --tags)'" -o one-api.exe
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
@@ -56,4 +56,4 @@ jobs:
draft: true
generate_release_notes: true
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/Dockerfile b/Dockerfile
index 96def4b2..03ff280a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,7 +23,7 @@ ADD go.mod go.sum ./
RUN go mod download
COPY . .
COPY --from=builder /web/build ./web/build
-RUN go build -ldflags "-s -w -X 'github.com/songquanpeng/one-api/common.Version=$(cat VERSION)' -extldflags '-static'" -o one-api
+RUN go build -ldflags "-s -w -X 'github.com/Laisky/one-api/common.Version=$(cat VERSION)' -extldflags '-static'" -o one-api
FROM debian:bullseye
diff --git a/README.en.md b/README.en.md
index bce47353..be77fb0d 100644
--- a/README.en.md
+++ b/README.en.md
@@ -3,7 +3,7 @@
-
+
@@ -18,17 +18,17 @@ _✨ Access all LLM through the standard OpenAI API format, easy to deploy & use
-
+
-
+
-
-
+
+
@@ -37,7 +37,7 @@ _✨ Access all LLM through the standard OpenAI API format, easy to deploy & use
·
Usage
·
-
Feedback
+
Feedback
·
Screenshots
·
@@ -101,7 +101,7 @@ Nginx reference configuration:
```
server{
server_name openai.justsong.cn; # Modify your domain name accordingly
-
+
location / {
client_max_body_size 64m;
proxy_http_version 1.1;
@@ -129,15 +129,15 @@ sudo service nginx restart
The initial account username is `root` and password is `123456`.
### Manual Deployment
-1. Download the executable file from [GitHub Releases](https://github.com/songquanpeng/one-api/releases/latest) or compile from source:
+1. Download the executable file from [GitHub Releases](https://github.com/Laisky/one-api/releases/latest) or compile from source:
```shell
- git clone https://github.com/songquanpeng/one-api.git
-
+ git clone https://github.com/Laisky/one-api.git
+
# Build the frontend
cd one-api/web/default
npm install
npm run build
-
+
# Build the backend
cd ../..
go mod download
@@ -164,9 +164,9 @@ For more detailed deployment tutorials, please refer to [this page](https://iama
Please refer to the [environment variables](#environment-variables) section for details on using environment variables.
### Deployment on Control Panels (e.g., Baota)
-Refer to [#175](https://github.com/songquanpeng/one-api/issues/175) for detailed instructions.
+Refer to [#175](https://github.com/Laisky/one-api/issues/175) for detailed instructions.
-If you encounter a blank page after deployment, refer to [#97](https://github.com/songquanpeng/one-api/issues/97) for possible solutions.
+If you encounter a blank page after deployment, refer to [#97](https://github.com/Laisky/one-api/issues/97) for possible solutions.
### Deployment on Third-Party Platforms
diff --git a/README.ja.md b/README.ja.md
index c15915ec..0d759ab7 100644
--- a/README.ja.md
+++ b/README.ja.md
@@ -3,7 +3,7 @@
-
+
@@ -18,17 +18,17 @@ _✨ 標準的な OpenAI API フォーマットを通じてすべての LLM に
-
+
-
+
-
-
+
+
@@ -37,7 +37,7 @@ _✨ 標準的な OpenAI API フォーマットを通じてすべての LLM に
·
使用方法
·
-
フィードバック
+
フィードバック
·
スクリーンショット
·
@@ -130,9 +130,9 @@ sudo service nginx restart
初期アカウントのユーザー名は `root` で、パスワードは `123456` です。
### マニュアルデプロイ
-1. [GitHub Releases](https://github.com/songquanpeng/one-api/releases/latest) から実行ファイルをダウンロードする、もしくはソースからコンパイルする:
+1. [GitHub Releases](https://github.com/Laisky/one-api/releases/latest) から実行ファイルをダウンロードする、もしくはソースからコンパイルする:
```shell
- git clone https://github.com/songquanpeng/one-api.git
+ git clone https://github.com/Laisky/one-api.git
# フロントエンドのビルド
cd one-api/web/default
@@ -165,9 +165,9 @@ sudo service nginx restart
Please refer to the [environment variables](#environment-variables) section for details on using environment variables.
### コントロールパネル(例: Baota)への展開
-詳しい手順は [#175](https://github.com/songquanpeng/one-api/issues/175) を参照してください。
+詳しい手順は [#175](https://github.com/Laisky/one-api/issues/175) を参照してください。
-配置後に空白のページが表示される場合は、[#97](https://github.com/songquanpeng/one-api/issues/97) を参照してください。
+配置後に空白のページが表示される場合は、[#97](https://github.com/Laisky/one-api/issues/97) を参照してください。
### サードパーティプラットフォームへのデプロイ
diff --git a/README.md b/README.md
index 71414a78..92f74a4a 100644
--- a/README.md
+++ b/README.md
@@ -7,3 +7,4 @@ docker image: `ppcelery/one-api:latest`
- update token usage by API
- support gpt-vision
- support update user's remained quota
+- support aws claude
diff --git a/common/config/config.go b/common/config/config.go
index 98d10a50..956e3ce8 100644
--- a/common/config/config.go
+++ b/common/config/config.go
@@ -10,7 +10,7 @@ import (
"sync"
"time"
- "github.com/songquanpeng/one-api/common/env"
+ "github.com/Laisky/one-api/common/env"
)
func init() {
diff --git a/common/config/key.go b/common/config/key.go
index 4b503c2d..d2bab7d2 100644
--- a/common/config/key.go
+++ b/common/config/key.go
@@ -6,4 +6,7 @@ const (
KeyAPIVersion = KeyPrefix + "api_version"
KeyLibraryID = KeyPrefix + "library_id"
KeyPlugin = KeyPrefix + "plugin"
+ KeySK = KeyPrefix + "sk"
+ KeyAK = KeyPrefix + "ak"
+ KeyRegion = KeyPrefix + "region"
)
diff --git a/common/ctxkey/key.go b/common/ctxkey/key.go
new file mode 100644
index 00000000..6f1002bd
--- /dev/null
+++ b/common/ctxkey/key.go
@@ -0,0 +1,7 @@
+package ctxkey
+
+var (
+ RequestModel = "request_model"
+ ConvertedRequest = "converted_request"
+ OriginalModel = "original_model"
+)
diff --git a/common/database.go b/common/database.go
index f2db759f..dda2f5e1 100644
--- a/common/database.go
+++ b/common/database.go
@@ -1,7 +1,7 @@
package common
import (
- "github.com/songquanpeng/one-api/common/env"
+ "github.com/Laisky/one-api/common/env"
)
var UsingSQLite = false
diff --git a/common/helper/helper.go b/common/helper/helper.go
index 35d075bc..279c3205 100644
--- a/common/helper/helper.go
+++ b/common/helper/helper.go
@@ -10,7 +10,7 @@ import (
"strconv"
"strings"
- "github.com/songquanpeng/one-api/common/random"
+ "github.com/Laisky/one-api/common/random"
)
func OpenBrowser(url string) {
diff --git a/common/image/image_test.go b/common/image/image_test.go
index 15ed78bc..3a156b24 100644
--- a/common/image/image_test.go
+++ b/common/image/image_test.go
@@ -12,7 +12,7 @@ import (
"strings"
"testing"
- img "github.com/songquanpeng/one-api/common/image"
+ img "github.com/Laisky/one-api/common/image"
"github.com/stretchr/testify/assert"
_ "golang.org/x/image/webp"
diff --git a/common/init.go b/common/init.go
index 2c1204d6..60a275d8 100644
--- a/common/init.go
+++ b/common/init.go
@@ -3,8 +3,8 @@ package common
import (
"flag"
"fmt"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
"log"
"os"
"path/filepath"
@@ -20,7 +20,7 @@ var (
func printHelp() {
fmt.Println("One API " + Version + " - All in one API service for OpenAI API.")
fmt.Println("Copyright (C) 2023 JustSong. All rights reserved.")
- fmt.Println("GitHub: https://github.com/songquanpeng/one-api")
+ fmt.Println("GitHub: https://github.com/Laisky/one-api")
fmt.Println("Usage: one-api [--port ] [--log-dir ] [--version] [--help]")
}
diff --git a/common/logger/logger.go b/common/logger/logger.go
index 957d8a11..a0c4f3ac 100644
--- a/common/logger/logger.go
+++ b/common/logger/logger.go
@@ -3,9 +3,9 @@ package logger
import (
"context"
"fmt"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
"io"
"log"
"os"
diff --git a/common/message/email.go b/common/message/email.go
index 585aa37a..2fad8e41 100644
--- a/common/message/email.go
+++ b/common/message/email.go
@@ -10,7 +10,7 @@ import (
"time"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common/config"
+ "github.com/Laisky/one-api/common/config"
)
func SendEmail(subject string, receiver string, content string) error {
diff --git a/common/message/main.go b/common/message/main.go
index 068426fd..c566c4de 100644
--- a/common/message/main.go
+++ b/common/message/main.go
@@ -2,7 +2,7 @@ package message
import (
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common/config"
+ "github.com/Laisky/one-api/common/config"
)
const (
diff --git a/common/message/message-pusher.go b/common/message/message-pusher.go
index e693ec26..a652dadf 100644
--- a/common/message/message-pusher.go
+++ b/common/message/message-pusher.go
@@ -4,7 +4,7 @@ import (
"bytes"
"encoding/json"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common/config"
+ "github.com/Laisky/one-api/common/config"
"net/http"
)
diff --git a/common/network/ip.go b/common/network/ip.go
index 0fbe5e6f..0acef6be 100644
--- a/common/network/ip.go
+++ b/common/network/ip.go
@@ -3,7 +3,7 @@ package network
import (
"context"
"fmt"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common/logger"
"net"
"strings"
)
diff --git a/common/redis.go b/common/redis.go
index f3205567..6395e7fd 100644
--- a/common/redis.go
+++ b/common/redis.go
@@ -2,8 +2,8 @@ package common
import (
"context"
+ "github.com/Laisky/one-api/common/logger"
"github.com/go-redis/redis/v8"
- "github.com/songquanpeng/one-api/common/logger"
"os"
"time"
)
diff --git a/common/utils.go b/common/utils.go
index ecee2c8e..580bac39 100644
--- a/common/utils.go
+++ b/common/utils.go
@@ -2,7 +2,7 @@ package common
import (
"fmt"
- "github.com/songquanpeng/one-api/common/config"
+ "github.com/Laisky/one-api/common/config"
)
func LogQuota(quota int64) string {
diff --git a/controller/auth/github.go b/controller/auth/github.go
index 22b48976..45199091 100644
--- a/controller/auth/github.go
+++ b/controller/auth/github.go
@@ -5,13 +5,13 @@ import (
"encoding/json"
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/model"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/model"
"net/http"
"strconv"
"time"
diff --git a/controller/auth/lark.go b/controller/auth/lark.go
index a1dd8e84..5b7d8b71 100644
--- a/controller/auth/lark.go
+++ b/controller/auth/lark.go
@@ -9,12 +9,12 @@ import (
"time"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/model"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/model"
)
type LarkOAuthResponse struct {
diff --git a/controller/auth/wechat.go b/controller/auth/wechat.go
index da1b513b..9ed66b35 100644
--- a/controller/auth/wechat.go
+++ b/controller/auth/wechat.go
@@ -4,10 +4,10 @@ import (
"encoding/json"
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/model"
"net/http"
"strconv"
"time"
diff --git a/controller/billing.go b/controller/billing.go
index dd518678..245f6922 100644
--- a/controller/billing.go
+++ b/controller/billing.go
@@ -1,10 +1,10 @@
package controller
import (
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/model"
+ relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/model"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
)
func GetSubscription(c *gin.Context) {
diff --git a/controller/channel-billing.go b/controller/channel-billing.go
index 79ef322a..6fe21760 100644
--- a/controller/channel-billing.go
+++ b/controller/channel-billing.go
@@ -9,17 +9,17 @@ import (
"time"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/monitor"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/client"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/monitor"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/client"
"github.com/gin-gonic/gin"
)
-// https://github.com/songquanpeng/one-api/issues/79
+// https://github.com/Laisky/one-api/issues/79
type OpenAISubscriptionResponse struct {
Object string `json:"object"`
diff --git a/controller/channel-test.go b/controller/channel-test.go
index 535b21bd..346c2a01 100644
--- a/controller/channel-test.go
+++ b/controller/channel-test.go
@@ -14,18 +14,18 @@ import (
"time"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/message"
- "github.com/songquanpeng/one-api/middleware"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/monitor"
- relay "github.com/songquanpeng/one-api/relay"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/controller"
- "github.com/songquanpeng/one-api/relay/meta"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/message"
+ "github.com/Laisky/one-api/middleware"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/monitor"
+ relay "github.com/Laisky/one-api/relay"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/controller"
+ "github.com/Laisky/one-api/relay/meta"
+ relaymodel "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
)
@@ -89,7 +89,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
if err != nil {
return err, nil
}
- if resp.StatusCode != http.StatusOK {
+ if resp != nil && resp.StatusCode != http.StatusOK {
err := controller.RelayErrorHandler(resp)
return fmt.Errorf("status code %d: %s", resp.StatusCode, err.Error.Message), &err.Error
}
diff --git a/controller/channel.go b/controller/channel.go
index 37bfb99d..803ba822 100644
--- a/controller/channel.go
+++ b/controller/channel.go
@@ -1,10 +1,10 @@
package controller
import (
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/model"
"net/http"
"strconv"
"strings"
diff --git a/controller/group.go b/controller/group.go
index 6f02394f..7dd5cc68 100644
--- a/controller/group.go
+++ b/controller/group.go
@@ -1,8 +1,8 @@
package controller
import (
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
"github.com/gin-gonic/gin"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
"net/http"
)
diff --git a/controller/log.go b/controller/log.go
index 9377b338..d30ee85c 100644
--- a/controller/log.go
+++ b/controller/log.go
@@ -1,9 +1,9 @@
package controller
import (
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/model"
"net/http"
"strconv"
)
diff --git a/controller/misc.go b/controller/misc.go
index 2928b8fb..09279742 100644
--- a/controller/misc.go
+++ b/controller/misc.go
@@ -3,10 +3,10 @@ package controller
import (
"encoding/json"
"fmt"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/message"
- "github.com/songquanpeng/one-api/model"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/message"
+ "github.com/Laisky/one-api/model"
"net/http"
"strings"
diff --git a/controller/model.go b/controller/model.go
index 01d01bf0..75986db1 100644
--- a/controller/model.go
+++ b/controller/model.go
@@ -2,14 +2,14 @@ package controller
import (
"fmt"
+ "github.com/Laisky/one-api/model"
+ relay "github.com/Laisky/one-api/relay"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/apitype"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/meta"
+ relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/model"
- relay "github.com/songquanpeng/one-api/relay"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/apitype"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/meta"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
"net/http"
"strings"
)
diff --git a/controller/option.go b/controller/option.go
index f86e3a64..0d691439 100644
--- a/controller/option.go
+++ b/controller/option.go
@@ -2,9 +2,9 @@ package controller
import (
"encoding/json"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/model"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/model"
"net/http"
"strings"
diff --git a/controller/redemption.go b/controller/redemption.go
index 8d2b3f38..dcf744b2 100644
--- a/controller/redemption.go
+++ b/controller/redemption.go
@@ -1,11 +1,11 @@
package controller
import (
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/model"
"net/http"
"strconv"
)
diff --git a/controller/relay.go b/controller/relay.go
index 51ded1d1..10aa1c3c 100644
--- a/controller/relay.go
+++ b/controller/relay.go
@@ -4,20 +4,22 @@ import (
"bytes"
"context"
"fmt"
- "github.com/gin-gonic/gin"
- "github.com/pkg/errors"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/middleware"
- dbmodel "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/monitor"
- "github.com/songquanpeng/one-api/relay/controller"
- "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
"io"
"net/http"
+
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/ctxkey"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/middleware"
+ dbmodel "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/monitor"
+ "github.com/Laisky/one-api/relay/controller"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
+ "github.com/gin-gonic/gin"
+ "github.com/pkg/errors"
)
// https://platform.openai.com/docs/api-reference/chat
@@ -55,7 +57,7 @@ func Relay(c *gin.Context) {
lastFailedChannelId := channelId
channelName := c.GetString("channel_name")
group := c.GetString("group")
- originalModel := c.GetString(common.CtxKeyOriginModel)
+ originalModel := c.GetString(ctxkey.OriginalModel)
go processChannelRelayError(ctx, channelId, channelName, bizErr)
requestId := c.GetString(logger.RequestIdKey)
retryTimes := config.RetryTimes
diff --git a/controller/token.go b/controller/token.go
index 74d31547..ff3a63ba 100644
--- a/controller/token.go
+++ b/controller/token.go
@@ -5,14 +5,14 @@ import (
"net/http"
"strconv"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/network"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
"github.com/jinzhu/copier"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/network"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/model"
)
func GetAllTokens(c *gin.Context) {
diff --git a/controller/user.go b/controller/user.go
index bd31c034..eb2a6f18 100644
--- a/controller/user.go
+++ b/controller/user.go
@@ -3,10 +3,10 @@ package controller
import (
"encoding/json"
"fmt"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/model"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/model"
"net/http"
"strconv"
"time"
diff --git a/go.mod b/go.mod
index aa15158c..f23735a7 100644
--- a/go.mod
+++ b/go.mod
@@ -1,32 +1,37 @@
-module github.com/songquanpeng/one-api
+module github.com/Laisky/one-api
+// +heroku goVersion go1.18
go 1.21
+toolchain go1.22.0
+
require (
github.com/Laisky/errors/v2 v2.0.1
github.com/Laisky/go-utils/v4 v4.9.1
github.com/aws/aws-sdk-go-v2 v1.26.1
github.com/aws/aws-sdk-go-v2/credentials v1.17.11
github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4
- github.com/gin-contrib/cors v1.7.0
- github.com/gin-contrib/gzip v0.0.6
- github.com/gin-contrib/sessions v0.0.5
- github.com/gin-contrib/static v1.1.0
+ github.com/gin-contrib/cors v1.7.1
+ github.com/gin-contrib/gzip v1.0.0
+ github.com/gin-contrib/sessions v1.0.0
+ github.com/gin-contrib/static v1.1.1
github.com/gin-gonic/gin v1.9.1
github.com/go-playground/validator/v10 v10.19.0
github.com/go-redis/redis/v8 v8.11.5
+ github.com/golang-jwt/jwt v3.2.2+incompatible
github.com/google/uuid v1.6.0
+ github.com/gorilla/websocket v1.5.1
github.com/jinzhu/copier v0.4.0
github.com/pkg/errors v0.9.1
github.com/pkoukk/tiktoken-go v0.1.6
github.com/smartystreets/goconvey v1.8.1
- github.com/stretchr/testify v1.8.4
- golang.org/x/crypto v0.21.0
+ github.com/stretchr/testify v1.9.0
+ golang.org/x/crypto v0.22.0
golang.org/x/image v0.15.0
- gorm.io/driver/mysql v1.5.5
+ gorm.io/driver/mysql v1.5.6
gorm.io/driver/postgres v1.5.7
gorm.io/driver/sqlite v1.5.5
- gorm.io/gorm v1.25.8
+ gorm.io/gorm v1.25.9
)
require (
@@ -40,7 +45,7 @@ require (
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect
github.com/aws/smithy-go v1.20.2 // indirect
- github.com/bytedance/sonic v1.11.2 // indirect
+ github.com/bytedance/sonic v1.11.3 // indirect
github.com/cespare/xxhash v1.1.0 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
@@ -58,13 +63,12 @@ require (
github.com/goccy/go-json v0.10.2 // indirect
github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
- github.com/gorilla/context v1.1.1 // indirect
- github.com/gorilla/securecookie v1.1.1 // indirect
- github.com/gorilla/sessions v1.2.1 // indirect
+ github.com/gorilla/context v1.1.2 // indirect
+ github.com/gorilla/securecookie v1.1.2 // indirect
+ github.com/gorilla/sessions v1.2.2 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
- github.com/jackc/pgx/v5 v5.5.4 // indirect
- github.com/jackc/puddle/v2 v2.2.1 // indirect
+ github.com/jackc/pgx/v5 v5.4.3 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/json-iterator/go v1.1.12 // indirect
@@ -72,10 +76,10 @@ require (
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect
+ github.com/mattn/go-sqlite3 v1.14.17 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
- github.com/pelletier/go-toml/v2 v2.1.1 // indirect
+ github.com/pelletier/go-toml/v2 v2.2.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/smarty/assertions v1.15.0 // indirect
github.com/tailscale/hujson v0.0.0-20221223112325-20486734a56a // indirect
@@ -87,8 +91,8 @@ require (
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect
golang.org/x/net v0.22.0 // indirect
golang.org/x/sync v0.6.0 // indirect
- golang.org/x/sys v0.18.0 // indirect
- golang.org/x/term v0.18.0 // indirect
+ golang.org/x/sys v0.19.0 // indirect
+ golang.org/x/term v0.19.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/tools v0.7.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
diff --git a/go.sum b/go.sum
index a787fc67..76be0aae 100644
--- a/go.sum
+++ b/go.sum
@@ -34,8 +34,8 @@ github.com/brianvoe/gofakeit/v6 v6.23.2 h1:lVde18uhad5wII/f5RMVFLtdQNE0HaGFuBUXm
github.com/brianvoe/gofakeit/v6 v6.23.2/go.mod h1:Ow6qC71xtwm79anlwKRlWZW6zVq9D2XHE4QSSMP/rU8=
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
-github.com/bytedance/sonic v1.11.2 h1:ywfwo0a/3j9HR8wsYGWsIWl2mvRsI950HyoxiBERw5A=
-github.com/bytedance/sonic v1.11.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
+github.com/bytedance/sonic v1.11.3 h1:jRN+yEjakWh8aK5FzrciUHG8OFXK+4/KrAX/ysEtHAA=
+github.com/bytedance/sonic v1.11.3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE=
@@ -47,7 +47,6 @@ github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpV
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
-github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -63,17 +62,16 @@ github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uq
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gammazero/deque v0.2.1 h1:qSdsbG6pgp6nL7A0+K/B7s12mcCY/5l5SIUpMOl+dC0=
github.com/gammazero/deque v0.2.1/go.mod h1:LFroj8x4cMYCukHJDbxFCkT+r9AndaJnFMuZDV34tuU=
-github.com/gin-contrib/cors v1.7.0 h1:wZX2wuZ0o7rV2/1i7gb4Jn+gW7HBqaP91fizJkBUJOA=
-github.com/gin-contrib/cors v1.7.0/go.mod h1:cI+h6iOAyxKRtUtC6iF/Si1KSFvGm/gK+kshxlCi8ro=
-github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4=
-github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk=
-github.com/gin-contrib/sessions v0.0.5 h1:CATtfHmLMQrMNpJRgzjWXD7worTh7g7ritsQfmF+0jE=
-github.com/gin-contrib/sessions v0.0.5/go.mod h1:vYAuaUPqie3WUSsft6HUlCjlwwoJQs97miaG2+7neKY=
+github.com/gin-contrib/cors v1.7.1 h1:s9SIppU/rk8enVvkzwiC2VK3UZ/0NNGsWfUKvV55rqs=
+github.com/gin-contrib/cors v1.7.1/go.mod h1:n/Zj7B4xyrgk/cX1WCX2dkzFfaNm/xJb6oIUk7WTtps=
+github.com/gin-contrib/gzip v1.0.0 h1:UKN586Po/92IDX6ie5CWLgMI81obiIp5nSP85T3wlTk=
+github.com/gin-contrib/gzip v1.0.0/go.mod h1:CtG7tQrPB3vIBo6Gat9FVUsis+1emjvQqd66ME5TdnE=
+github.com/gin-contrib/sessions v1.0.0 h1:r5GLta4Oy5xo9rAwMHx8B4wLpeRGHMdz9NafzJAdP8Y=
+github.com/gin-contrib/sessions v1.0.0/go.mod h1:DN0f4bvpqMQElDdi+gNGScrP2QEI04IErRyMFyorUOI=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
-github.com/gin-contrib/static v1.1.0 h1:MMVoe+sAwMbt1rqH91C48LAmmBn9tuVJInaT5b/64OI=
-github.com/gin-contrib/static v1.1.0/go.mod h1:41ymO4uaFIGCy1kco0PBunNbvXeesKsyPR9TnVKR1BQ=
-github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
+github.com/gin-contrib/static v1.1.1 h1:XEvBd4DDLG1HBlyPBQU1XO8NlTpw6mgdqcPteetYA5k=
+github.com/gin-contrib/static v1.1.1/go.mod h1:yRGmar7+JYvbMLRPIi4H5TVVSBwULfT9vetnVD0IO74=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/go-json-experiment/json v0.0.0-20231011163920-8aa127fd5801 h1:PRieymvnGuBZUnWVQPBOemqlIhRznqtSxs/1LqlWe20=
@@ -81,52 +79,49 @@ github.com/go-json-experiment/json v0.0.0-20231011163920-8aa127fd5801/go.mod h1:
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
-github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
-github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
-github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
-github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc=
github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
-github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
-github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
+github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 h1:5/4TSDzpDnHQ8rKEEQBjRlYx77mHOvXu08oGchxej7o=
github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/17GFCPDK39NRarlMI+kt+O60S12cNB5J9Y=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
-github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
-github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
-github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
-github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
-github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
+github.com/gorilla/context v1.1.2 h1:WRkNAv2uoa03QNIc1A6u4O7DAGMUVoopZhkiXWA2V1o=
+github.com/gorilla/context v1.1.2/go.mod h1:KDPwT9i/MeWHiLl90fuTgrt4/wPcv75vFAZLaOOcbxM=
+github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
+github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
+github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY=
+github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
+github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
+github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
github.com/graph-gophers/graphql-go v1.5.0/go.mod h1:YtmJZDLbF1YYNrlNAuiO5zAStUWc3XZT07iGsVqe1Os=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
-github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8=
-github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A=
-github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
-github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
+github.com/jackc/pgx/v5 v5.4.3 h1:cxFyXhxlvAifxnkKKdlxv8XqUf59tDlYjnV5YYfsJJY=
+github.com/jackc/pgx/v5 v5.4.3/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
@@ -141,22 +136,16 @@ github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa02
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
-github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
-github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
+github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM=
+github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -169,10 +158,8 @@ github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs=
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
-github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
-github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
-github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
-github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pelletier/go-toml/v2 v2.2.0 h1:QLgLl2yMN7N+ruc31VynXs1vhMZa7CeHHejIeBAsoHo=
+github.com/pelletier/go-toml/v2 v2.2.0/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw=
@@ -181,7 +168,6 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g=
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
-github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY=
@@ -193,20 +179,19 @@ github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasO
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
-github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tailscale/hujson v0.0.0-20221223112325-20486734a56a h1:SJy1Pu0eH1C29XwJucQo73FrleVK6t4kYz4NVhp34Yw=
github.com/tailscale/hujson v0.0.0-20221223112325-20486734a56a/go.mod h1:DFSS3NAGHthKo1gTlmEcSBiZrRJXi28rLNd/1udP1c8=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
-github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
-github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
go.opentelemetry.io/otel v1.6.3/go.mod h1:7BgNga5fNlF/iZjG06hM3yofffp0ofKCDwSXx1GC4dI=
@@ -222,9 +207,8 @@ golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
-golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
+golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
+golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug=
@@ -234,7 +218,6 @@ golang.org/x/mod v0.9.0 h1:KENHtAZL2y3NLMYZeHY9DW8HW8V+kQyJsY/V9JlKvCs=
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -242,55 +225,42 @@ golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
-golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
-golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
-golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
+golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
+golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
+golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4=
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
-google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
-gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gorm.io/driver/mysql v1.5.5 h1:WxklwX6FozMs1gk9yVadxGfjGiJjrBKPvIIvYZOMyws=
-gorm.io/driver/mysql v1.5.5/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
+gorm.io/driver/mysql v1.5.6 h1:Ld4mkIickM+EliaQZQx3uOJDJHtrd70MxAUqWqlx3Y8=
+gorm.io/driver/mysql v1.5.6/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
gorm.io/driver/postgres v1.5.7 h1:8ptbNJTDbEmhdr62uReG5BGkdQyeasu/FZHxI0IMGnM=
gorm.io/driver/postgres v1.5.7/go.mod h1:3e019WlBaYI5o5LIdNV+LyxCMNtLOQETBXL2h4chKpA=
gorm.io/driver/sqlite v1.5.5 h1:7MDMtUZhV065SilG62E0MquljeArQZNfJnjd9i9gx3E=
gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATavE=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
-gorm.io/gorm v1.25.8 h1:WAGEZ/aEcznN4D03laj8DKnehe1e9gYQAjW8xyPRdeo=
-gorm.io/gorm v1.25.8/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
+gorm.io/gorm v1.25.9 h1:wct0gxZIELDk8+ZqF/MVnHLkA1rvYlBWUMv2EdsK1g8=
+gorm.io/gorm v1.25.9/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
diff --git a/main.go b/main.go
index 3ee1dc94..f5e5e7e6 100644
--- a/main.go
+++ b/main.go
@@ -7,17 +7,17 @@ import (
"os"
"strconv"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/middleware"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/router"
"github.com/gin-contrib/sessions"
"github.com/gin-contrib/sessions/cookie"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/middleware"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/router"
)
//go:embed web/build/*
diff --git a/middleware/auth.go b/middleware/auth.go
index d01007ac..ca6cb1b0 100644
--- a/middleware/auth.go
+++ b/middleware/auth.go
@@ -2,12 +2,12 @@ package middleware
import (
"fmt"
+ "github.com/Laisky/one-api/common/blacklist"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/network"
+ "github.com/Laisky/one-api/model"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/blacklist"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/network"
- "github.com/songquanpeng/one-api/model"
"net/http"
"strings"
)
diff --git a/middleware/distributor.go b/middleware/distributor.go
index b6952d42..68cfd33b 100644
--- a/middleware/distributor.go
+++ b/middleware/distributor.go
@@ -6,13 +6,14 @@ import (
"strconv"
"strings"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/ctxkey"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/relay/channeltype"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/relay/billing/ratio"
- "github.com/songquanpeng/one-api/relay/channeltype"
)
type ModelRequest struct {
@@ -79,7 +80,7 @@ func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, mode
c.Set("channel_id", channel.Id)
c.Set("channel_name", channel.Name)
c.Set("model_mapping", channel.GetModelMapping())
- c.Set(common.CtxKeyOriginModel, modelName) // for retry
+ c.Set(ctxkey.OriginalModel, modelName) // for retry
c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
c.Set("base_url", channel.GetBaseURL())
// this is for backward compatibility
diff --git a/middleware/logger.go b/middleware/logger.go
index 6aae4f23..25fd1b34 100644
--- a/middleware/logger.go
+++ b/middleware/logger.go
@@ -2,8 +2,8 @@ package middleware
import (
"fmt"
+ "github.com/Laisky/one-api/common/logger"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/logger"
)
func SetUpLogger(server *gin.Engine) {
diff --git a/middleware/rate-limit.go b/middleware/rate-limit.go
index 0f300f2b..1fa01fca 100644
--- a/middleware/rate-limit.go
+++ b/middleware/rate-limit.go
@@ -3,9 +3,9 @@ package middleware
import (
"context"
"fmt"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
"net/http"
"time"
)
diff --git a/middleware/recover.go b/middleware/recover.go
index cfc3f827..638b6009 100644
--- a/middleware/recover.go
+++ b/middleware/recover.go
@@ -2,9 +2,9 @@ package middleware
import (
"fmt"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/logger"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/logger"
"net/http"
"runtime/debug"
)
@@ -21,7 +21,7 @@ func RelayPanicRecover() gin.HandlerFunc {
logger.Errorf(ctx, fmt.Sprintf("request body: %s", string(body)))
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
- "message": fmt.Sprintf("Panic detected, error: %v. Please submit an issue with the related log here: https://github.com/songquanpeng/one-api", err),
+ "message": fmt.Sprintf("Panic detected, error: %v. Please submit an issue with the related log here: https://github.com/Laisky/one-api", err),
"type": "one_api_panic",
},
})
diff --git a/middleware/request-id.go b/middleware/request-id.go
index a4c49ddb..3042f608 100644
--- a/middleware/request-id.go
+++ b/middleware/request-id.go
@@ -2,9 +2,9 @@ package middleware
import (
"context"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
)
func RequestId() func(c *gin.Context) {
diff --git a/middleware/turnstile-check.go b/middleware/turnstile-check.go
index 403bcb34..034fe0f7 100644
--- a/middleware/turnstile-check.go
+++ b/middleware/turnstile-check.go
@@ -2,10 +2,10 @@ package middleware
import (
"encoding/json"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
"net/http"
"net/url"
)
diff --git a/middleware/utils.go b/middleware/utils.go
index b65b018b..0bd2a10c 100644
--- a/middleware/utils.go
+++ b/middleware/utils.go
@@ -2,10 +2,10 @@ package middleware
import (
"fmt"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
"strings"
)
diff --git a/model/ability.go b/model/ability.go
index 2db72518..c68739f0 100644
--- a/model/ability.go
+++ b/model/ability.go
@@ -2,7 +2,7 @@ package model
import (
"context"
- "github.com/songquanpeng/one-api/common"
+ "github.com/Laisky/one-api/common"
"gorm.io/gorm"
"sort"
"strings"
diff --git a/model/cache.go b/model/cache.go
index a05cec19..a5207606 100644
--- a/model/cache.go
+++ b/model/cache.go
@@ -5,10 +5,10 @@ import (
"encoding/json"
"fmt"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
"math/rand"
"sort"
"strconv"
diff --git a/model/channel.go b/model/channel.go
index e667f7e7..b8066d11 100644
--- a/model/channel.go
+++ b/model/channel.go
@@ -3,9 +3,9 @@ package model
import (
"encoding/json"
"fmt"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
"gorm.io/gorm"
)
diff --git a/model/log.go b/model/log.go
index 6fba776a..b6393876 100644
--- a/model/log.go
+++ b/model/log.go
@@ -3,10 +3,10 @@ package model
import (
"context"
"fmt"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
"gorm.io/gorm"
)
diff --git a/model/main.go b/model/main.go
index 3c6ab79d..34730ddd 100644
--- a/model/main.go
+++ b/model/main.go
@@ -6,13 +6,13 @@ import (
"strings"
"time"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/env"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
"github.com/pkg/errors"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/env"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
diff --git a/model/option.go b/model/option.go
index bed8d4c3..b6a29270 100644
--- a/model/option.go
+++ b/model/option.go
@@ -1,9 +1,9 @@
package model
import (
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
"strconv"
"strings"
"time"
diff --git a/model/redemption.go b/model/redemption.go
index 62428d35..e0a6cc05 100644
--- a/model/redemption.go
+++ b/model/redemption.go
@@ -3,8 +3,8 @@ package model
import (
"fmt"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/helper"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
"gorm.io/gorm"
)
diff --git a/model/token.go b/model/token.go
index 10fd0d78..39a13d40 100644
--- a/model/token.go
+++ b/model/token.go
@@ -4,11 +4,11 @@ import (
"fmt"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/message"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/message"
"gorm.io/gorm"
)
diff --git a/model/user.go b/model/user.go
index 3cc1f9c0..209d791c 100644
--- a/model/user.go
+++ b/model/user.go
@@ -5,11 +5,11 @@ import (
"strings"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/blacklist"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/blacklist"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
"gorm.io/gorm"
)
diff --git a/model/utils.go b/model/utils.go
index a55eb4b6..12c0f037 100644
--- a/model/utils.go
+++ b/model/utils.go
@@ -1,8 +1,8 @@
package model
import (
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
"sync"
"time"
)
diff --git a/monitor/channel.go b/monitor/channel.go
index 7e5dc58a..b4f84916 100644
--- a/monitor/channel.go
+++ b/monitor/channel.go
@@ -2,10 +2,10 @@ package monitor
import (
"fmt"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/message"
- "github.com/songquanpeng/one-api/model"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/message"
+ "github.com/Laisky/one-api/model"
)
func notifyRootUser(subject string, content string) {
diff --git a/monitor/manage.go b/monitor/manage.go
index 946e78af..dbe87c9c 100644
--- a/monitor/manage.go
+++ b/monitor/manage.go
@@ -1,8 +1,8 @@
package monitor
import (
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/relay/model"
"net/http"
"strings"
)
diff --git a/monitor/metric.go b/monitor/metric.go
index 98bc546e..3479dc90 100644
--- a/monitor/metric.go
+++ b/monitor/metric.go
@@ -1,7 +1,7 @@
package monitor
import (
- "github.com/songquanpeng/one-api/common/config"
+ "github.com/Laisky/one-api/common/config"
)
var store = make(map[int][]bool)
diff --git a/relay/adaptor.go b/relay/adaptor.go
index 588cb4c2..1519b40e 100644
--- a/relay/adaptor.go
+++ b/relay/adaptor.go
@@ -1,41 +1,46 @@
package relay
import (
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/adaptor/aiproxy"
- "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
- "github.com/songquanpeng/one-api/relay/adaptor/aws"
- "github.com/songquanpeng/one-api/relay/adaptor/gemini"
- "github.com/songquanpeng/one-api/relay/adaptor/ollama"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/adaptor/palm"
- "github.com/songquanpeng/one-api/relay/apitype"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/aiproxy"
+ "github.com/Laisky/one-api/relay/adaptor/ali"
+ "github.com/Laisky/one-api/relay/adaptor/anthropic"
+ "github.com/Laisky/one-api/relay/adaptor/aws"
+ "github.com/Laisky/one-api/relay/adaptor/baidu"
+ "github.com/Laisky/one-api/relay/adaptor/gemini"
+ "github.com/Laisky/one-api/relay/adaptor/ollama"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/adaptor/palm"
+ "github.com/Laisky/one-api/relay/adaptor/tencent"
+ "github.com/Laisky/one-api/relay/adaptor/xunfei"
+ "github.com/Laisky/one-api/relay/adaptor/zhipu"
+ "github.com/Laisky/one-api/relay/apitype"
)
func GetAdaptor(apiType int) adaptor.Adaptor {
switch apiType {
case apitype.AIProxyLibrary:
return &aiproxy.Adaptor{}
- // case apitype.Ali:
- // return &ali.Adaptor{}
+ case apitype.Ali:
+ return &ali.Adaptor{}
case apitype.Anthropic:
return &anthropic.Adaptor{}
case apitype.AwsClaude:
return &aws.Adaptor{}
- // case apitype.Baidu:
- // return &baidu.Adaptor{}
+ case apitype.Baidu:
+ return &baidu.Adaptor{}
case apitype.Gemini:
return &gemini.Adaptor{}
case apitype.OpenAI:
return &openai.Adaptor{}
case apitype.PaLM:
return &palm.Adaptor{}
- // case apitype.Tencent:
- // return &tencent.Adaptor{}
- // case apitype.Xunfei:
- // return &xunfei.Adaptor{}
- // case apitype.Zhipu:
- // return &zhipu.Adaptor{}
+ case apitype.Tencent:
+ return &tencent.Adaptor{}
+ case apitype.Xunfei:
+ return &xunfei.Adaptor{}
+ case apitype.Zhipu:
+ return &zhipu.Adaptor{}
case apitype.Ollama:
return &ollama.Adaptor{}
}
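
This hunk re-enables the Ali, Baidu, Tencent, Xunfei, and Zhipu adaptors that were previously commented out, so `GetAdaptor` now covers every `apitype` constant it switches on. A hedged usage sketch, assuming `GetAdaptor` still returns nil for unhandled types (as the fall-through implies) and that the `adaptor.Adaptor` interface exposes `GetChannelName`, as the implementations above suggest:

```go
package relay_test

import (
	"fmt"

	"github.com/Laisky/one-api/relay"
	"github.com/Laisky/one-api/relay/apitype"
)

func ExampleGetAdaptor() {
	// apitype.Ali is handled again after this change.
	a := relay.GetAdaptor(apitype.Ali)
	if a == nil {
		// Callers still need this check for unhandled api types.
		fmt.Println("no adaptor registered")
		return
	}
	fmt.Println("using the", a.GetChannelName(), "adaptor")
	// Output: using the ali adaptor
}
```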
diff --git a/relay/adaptor/aiproxy/adaptor.go b/relay/adaptor/aiproxy/adaptor.go
index 31865698..ae7a8290 100644
--- a/relay/adaptor/aiproxy/adaptor.go
+++ b/relay/adaptor/aiproxy/adaptor.go
@@ -3,11 +3,11 @@ package aiproxy
import (
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
diff --git a/relay/adaptor/aiproxy/constants.go b/relay/adaptor/aiproxy/constants.go
index 818d2709..aec821e3 100644
--- a/relay/adaptor/aiproxy/constants.go
+++ b/relay/adaptor/aiproxy/constants.go
@@ -1,6 +1,6 @@
package aiproxy
-import "github.com/songquanpeng/one-api/relay/adaptor/openai"
+import "github.com/Laisky/one-api/relay/adaptor/openai"
var ModelList = []string{""}
diff --git a/relay/adaptor/aiproxy/main.go b/relay/adaptor/aiproxy/main.go
index 961260de..8ab319e7 100644
--- a/relay/adaptor/aiproxy/main.go
+++ b/relay/adaptor/aiproxy/main.go
@@ -9,14 +9,14 @@ import (
"strconv"
"strings"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/constant"
- "github.com/songquanpeng/one-api/relay/model"
)
// https://docs.aiproxy.io/dev/library#使用已经定制好的知识库进行对话问答
diff --git a/relay/adaptor/ali/adaptor.go b/relay/adaptor/ali/adaptor.go
index e004211e..f12eae8c 100644
--- a/relay/adaptor/ali/adaptor.go
+++ b/relay/adaptor/ali/adaptor.go
@@ -1,105 +1,106 @@
package ali
-// import (
-// "github.com/Laisky/errors/v2"
-// "fmt"
-// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/common/config"
-// "github.com/songquanpeng/one-api/relay/adaptor"
-// "github.com/songquanpeng/one-api/relay/meta"
-// "github.com/songquanpeng/one-api/relay/model"
-// "github.com/songquanpeng/one-api/relay/relaymode"
-// "io"
-// "net/http"
-// )
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
-// // https://help.aliyun.com/zh/dashscope/developer-reference/api-details
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
+ "github.com/gin-gonic/gin"
+)
-// type Adaptor struct {
-// }
+// https://help.aliyun.com/zh/dashscope/developer-reference/api-details
-// func (a *Adaptor) Init(meta *meta.Meta) {
+type Adaptor struct {
+}
-// }
+func (a *Adaptor) Init(meta *meta.Meta) {
-// func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
-// fullRequestURL := ""
-// switch meta.Mode {
-// case relaymode.Embeddings:
-// fullRequestURL = fmt.Sprintf("%s/api/v1/services/embeddings/text-embedding/text-embedding", meta.BaseURL)
-// case relaymode.ImagesGenerations:
-// fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text2image/image-synthesis", meta.BaseURL)
-// default:
-// fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text-generation/generation", meta.BaseURL)
-// }
+}
-// return fullRequestURL, nil
-// }
+func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
+ fullRequestURL := ""
+ switch meta.Mode {
+ case relaymode.Embeddings:
+ fullRequestURL = fmt.Sprintf("%s/api/v1/services/embeddings/text-embedding/text-embedding", meta.BaseURL)
+ case relaymode.ImagesGenerations:
+ fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text2image/image-synthesis", meta.BaseURL)
+ default:
+ fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text-generation/generation", meta.BaseURL)
+ }
-// func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
-// adaptor.SetupCommonRequestHeader(c, req, meta)
-// if meta.IsStream {
-// req.Header.Set("Accept", "text/event-stream")
-// req.Header.Set("X-DashScope-SSE", "enable")
-// }
-// req.Header.Set("Authorization", "Bearer "+meta.APIKey)
+ return fullRequestURL, nil
+}
-// if meta.Mode == relaymode.ImagesGenerations {
-// req.Header.Set("X-DashScope-Async", "enable")
-// }
-// if c.GetString(config.KeyPlugin) != "" {
-// req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
-// }
-// return nil
-// }
+func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+ adaptor.SetupCommonRequestHeader(c, req, meta)
+ if meta.IsStream {
+ req.Header.Set("Accept", "text/event-stream")
+ req.Header.Set("X-DashScope-SSE", "enable")
+ }
+ req.Header.Set("Authorization", "Bearer "+meta.APIKey)
-// func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
-// switch relayMode {
-// case relaymode.Embeddings:
-// aliEmbeddingRequest := ConvertEmbeddingRequest(*request)
-// return aliEmbeddingRequest, nil
-// default:
-// aliRequest := ConvertRequest(*request)
-// return aliRequest, nil
-// }
-// }
+ if meta.Mode == relaymode.ImagesGenerations {
+ req.Header.Set("X-DashScope-Async", "enable")
+ }
+ if c.GetString(config.KeyPlugin) != "" {
+ req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
+ }
+ return nil
+}
-// func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
+func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ switch relayMode {
+ case relaymode.Embeddings:
+ aliEmbeddingRequest := ConvertEmbeddingRequest(*request)
+ return aliEmbeddingRequest, nil
+ default:
+ aliRequest := ConvertRequest(*request)
+ return aliRequest, nil
+ }
+}
-// aliRequest := ConvertImageRequest(*request)
-// return aliRequest, nil
-// }
+func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
-// func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
-// return adaptor.DoRequestHelper(a, c, meta, requestBody)
-// }
+ aliRequest := ConvertImageRequest(*request)
+ return aliRequest, nil
+}
-// func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-// if meta.IsStream {
-// err, usage = StreamHandler(c, resp)
-// } else {
-// switch meta.Mode {
-// case relaymode.Embeddings:
-// err, usage = EmbeddingHandler(c, resp)
-// case relaymode.ImagesGenerations:
-// err, usage = ImageHandler(c, resp)
-// default:
-// err, usage = Handler(c, resp)
-// }
-// }
-// return
-// }
+func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+ return adaptor.DoRequestHelper(a, c, meta, requestBody)
+}
-// func (a *Adaptor) GetModelList() []string {
-// return ModelList
-// }
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+ if meta.IsStream {
+ err, usage = StreamHandler(c, resp)
+ } else {
+ switch meta.Mode {
+ case relaymode.Embeddings:
+ err, usage = EmbeddingHandler(c, resp)
+ case relaymode.ImagesGenerations:
+ err, usage = ImageHandler(c, resp)
+ default:
+ err, usage = Handler(c, resp)
+ }
+ }
+ return
+}
-// func (a *Adaptor) GetChannelName() string {
-// return "ali"
-// }
+func (a *Adaptor) GetModelList() []string {
+ return ModelList
+}
+
+func (a *Adaptor) GetChannelName() string {
+ return "ali"
+}
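
The restored adaptor routes requests to different DashScope endpoints based on the relay mode. A small sketch of the URL selection, using only fields that appear above (`BaseURL` and `Mode` on `meta.Meta`); the base URL is illustrative:

```go
package ali_test

import (
	"fmt"

	"github.com/Laisky/one-api/relay/adaptor/ali"
	"github.com/Laisky/one-api/relay/meta"
	"github.com/Laisky/one-api/relay/relaymode"
)

func ExampleAdaptor_GetRequestURL() {
	a := &ali.Adaptor{}
	m := &meta.Meta{
		BaseURL: "https://dashscope.aliyuncs.com", // illustrative base URL
		Mode:    relaymode.Embeddings,             // selects the embeddings endpoint
	}
	url, _ := a.GetRequestURL(m)
	fmt.Println(url)
	// Output: https://dashscope.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding
}
```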
diff --git a/relay/adaptor/ali/image.go b/relay/adaptor/ali/image.go
index cef509e2..c03c0bfd 100644
--- a/relay/adaptor/ali/image.go
+++ b/relay/adaptor/ali/image.go
@@ -3,17 +3,18 @@ package ali
import (
"encoding/base64"
"encoding/json"
+ "errors"
"fmt"
- "github.com/Laisky/errors/v2"
- "github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
"strings"
"time"
+
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
)
func ImageHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
diff --git a/relay/adaptor/ali/main.go b/relay/adaptor/ali/main.go
index 9537b2fa..03a5019b 100644
--- a/relay/adaptor/ali/main.go
+++ b/relay/adaptor/ali/main.go
@@ -1,323 +1,279 @@
package ali
-// import (
-// "github.com/songquanpeng/one-api/common"
-// )
+import (
+ "bufio"
+ "encoding/json"
+ "io"
+ "net/http"
+ "strings"
-// // https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.2399480.0.0.1adb778fAdzP9w#341800c0f8w0r
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+)
-// type AliMessage struct {
-// Content string `json:"content"`
-// Role string `json:"role"`
-// }
+// https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.2399480.0.0.1adb778fAdzP9w#341800c0f8w0r
-// type AliInput struct {
-// //Prompt string `json:"prompt"`
-// Messages []AliMessage `json:"messages"`
-// }
+const EnableSearchModelSuffix = "-internet"
-// type AliParameters struct {
-// TopP float64 `json:"top_p,omitempty"`
-// TopK int `json:"top_k,omitempty"`
-// Seed uint64 `json:"seed,omitempty"`
-// EnableSearch bool `json:"enable_search,omitempty"`
-// }
+func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
+ messages := make([]Message, 0, len(request.Messages))
+ for i := 0; i < len(request.Messages); i++ {
+ message := request.Messages[i]
+ messages = append(messages, Message{
+ Content: message.StringContent(),
+ Role: strings.ToLower(message.Role),
+ })
+ }
+ enableSearch := false
+ aliModel := request.Model
+ if strings.HasSuffix(aliModel, EnableSearchModelSuffix) {
+ enableSearch = true
+ aliModel = strings.TrimSuffix(aliModel, EnableSearchModelSuffix)
+ }
+ if request.TopP >= 1 {
+ request.TopP = 0.9999
+ }
+ return &ChatRequest{
+ Model: aliModel,
+ Input: Input{
+ Messages: messages,
+ },
+ Parameters: Parameters{
+ EnableSearch: enableSearch,
+ IncrementalOutput: request.Stream,
+ Seed: uint64(request.Seed),
+ MaxTokens: request.MaxTokens,
+ Temperature: request.Temperature,
+ TopP: request.TopP,
+ TopK: request.TopK,
+ ResultFormat: "message",
+ Tools: request.Tools,
+ },
+ }
+}
-// type AliChatRequest struct {
-// Model string `json:"model"`
-// Input AliInput `json:"input"`
-// Parameters AliParameters `json:"parameters,omitempty"`
-// }
+func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
+ return &EmbeddingRequest{
+ Model: "text-embedding-v1",
+ Input: struct {
+ Texts []string `json:"texts"`
+ }{
+ Texts: request.ParseInput(),
+ },
+ }
+}
-// type AliEmbeddingRequest struct {
-// Model string `json:"model"`
-// Input struct {
-// Texts []string `json:"texts"`
-// } `json:"input"`
-// Parameters *struct {
-// TextType string `json:"text_type,omitempty"`
-// } `json:"parameters,omitempty"`
-// }
+func ConvertImageRequest(request model.ImageRequest) *ImageRequest {
+ var imageRequest ImageRequest
+ imageRequest.Input.Prompt = request.Prompt
+ imageRequest.Model = request.Model
+ imageRequest.Parameters.Size = strings.Replace(request.Size, "x", "*", -1)
+ imageRequest.Parameters.N = request.N
+ imageRequest.ResponseFormat = request.ResponseFormat
-// type AliEmbedding struct {
-// Embedding []float64 `json:"embedding"`
-// TextIndex int `json:"text_index"`
-// }
+ return &imageRequest
+}
-// type AliEmbeddingResponse struct {
-// Output struct {
-// Embeddings []AliEmbedding `json:"embeddings"`
-// } `json:"output"`
-// Usage AliUsage `json:"usage"`
-// AliError
-// }
+func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var aliResponse EmbeddingResponse
+ err := json.NewDecoder(resp.Body).Decode(&aliResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
-// type AliError struct {
-// Code string `json:"code"`
-// Message string `json:"message"`
-// RequestId string `json:"request_id"`
-// }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
-// type AliUsage struct {
-// InputTokens int `json:"input_tokens"`
-// OutputTokens int `json:"output_tokens"`
-// TotalTokens int `json:"total_tokens"`
-// }
+ if aliResponse.Code != "" {
+ return &model.ErrorWithStatusCode{
+ Error: model.Error{
+ Message: aliResponse.Message,
+ Type: aliResponse.Code,
+ Param: aliResponse.RequestId,
+ Code: aliResponse.Code,
+ },
+ StatusCode: resp.StatusCode,
+ }, nil
+ }
-// type AliOutput struct {
-// Text string `json:"text"`
-// FinishReason string `json:"finish_reason"`
-// }
+ fullTextResponse := embeddingResponseAli2OpenAI(&aliResponse)
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+ _, err = c.Writer.Write(jsonResponse)
+ return nil, &fullTextResponse.Usage
+}
-// type AliChatResponse struct {
-// Output AliOutput `json:"output"`
-// Usage AliUsage `json:"usage"`
-// AliError
-// }
+func embeddingResponseAli2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
+ openAIEmbeddingResponse := openai.EmbeddingResponse{
+ Object: "list",
+ Data: make([]openai.EmbeddingResponseItem, 0, len(response.Output.Embeddings)),
+ Model: "text-embedding-v1",
+ Usage: model.Usage{TotalTokens: response.Usage.TotalTokens},
+ }
-// func requestOpenAI2Ali(request GeneralOpenAIRequest) *AliChatRequest {
-// messages := make([]AliMessage, 0, len(request.Messages))
-// prompt := ""
-// for i := 0; i < len(request.Messages); i++ {
-// message := request.Messages[i]
-// if message.Role == "system" {
-// messages = append(messages, AliMessage{
-// User: message.Content,
-// Bot: "Okay",
-// })
-// continue
-// } else {
-// if i == len(request.Messages)-1 {
-// prompt = message.Content
-// break
-// }
-// messages = append(messages, AliMessage{
-// User: message.Content,
-// Bot: request.Messages[i+1].Content,
-// })
-// i++
-// }
-// }
-// return &AliChatRequest{
-// Model: request.Model,
-// Input: AliInput{
-// Prompt: prompt,
-// History: messages,
-// },
-// //Parameters: AliParameters{ // ChatGPT's parameters are not compatible with Ali's
-// // TopP: request.TopP,
-// // TopK: 50,
-// // //Seed: 0,
-// // //EnableSearch: false,
-// //},
-// }
-// }
+ for _, item := range response.Output.Embeddings {
+ openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
+ Object: `embedding`,
+ Index: item.TextIndex,
+ Embedding: item.Embedding,
+ })
+ }
+ return &openAIEmbeddingResponse
+}
-// func embeddingRequestOpenAI2Ali(request GeneralOpenAIRequest) *AliEmbeddingRequest {
-// return &AliEmbeddingRequest{
-// Model: "text-embedding-v1",
-// Input: struct {
-// Texts []string `json:"texts"`
-// }{
-// Texts: request.ParseInput(),
-// },
-// }
-// }
+func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
+ fullTextResponse := openai.TextResponse{
+ Id: response.RequestId,
+ Object: "chat.completion",
+ Created: helper.GetTimestamp(),
+ Choices: response.Output.Choices,
+ Usage: model.Usage{
+ PromptTokens: response.Usage.InputTokens,
+ CompletionTokens: response.Usage.OutputTokens,
+ TotalTokens: response.Usage.InputTokens + response.Usage.OutputTokens,
+ },
+ }
+ return &fullTextResponse
+}
-// func aliEmbeddingHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var aliResponse AliEmbeddingResponse
-// err := json.NewDecoder(resp.Body).Decode(&aliResponse)
-// if err != nil {
-// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
-// }
+func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+ if len(aliResponse.Output.Choices) == 0 {
+ return nil
+ }
+ aliChoice := aliResponse.Output.Choices[0]
+ var choice openai.ChatCompletionsStreamResponseChoice
+ choice.Delta = aliChoice.Message
+ if aliChoice.FinishReason != "null" {
+ finishReason := aliChoice.FinishReason
+ choice.FinishReason = &finishReason
+ }
+ response := openai.ChatCompletionsStreamResponse{
+ Id: aliResponse.RequestId,
+ Object: "chat.completion.chunk",
+ Created: helper.GetTimestamp(),
+ Model: "qwen",
+ Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
+ }
+ return &response
+}
-// err = resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
+func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var usage model.Usage
+ scanner := bufio.NewScanner(resp.Body)
+ scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := strings.Index(string(data), "\n"); i >= 0 {
+ return i + 1, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ })
+ dataChan := make(chan string)
+ stopChan := make(chan bool)
+ go func() {
+ for scanner.Scan() {
+ data := scanner.Text()
+ if len(data) < 5 { // ignore blank line or wrong format
+ continue
+ }
+ if data[:5] != "data:" {
+ continue
+ }
+ data = data[5:]
+ dataChan <- data
+ }
+ stopChan <- true
+ }()
+ common.SetEventStreamHeaders(c)
+ //lastResponseText := ""
+ c.Stream(func(w io.Writer) bool {
+ select {
+ case data := <-dataChan:
+ var aliResponse ChatResponse
+ err := json.Unmarshal([]byte(data), &aliResponse)
+ if err != nil {
+ logger.SysError("error unmarshalling stream response: " + err.Error())
+ return true
+ }
+ if aliResponse.Usage.OutputTokens != 0 {
+ usage.PromptTokens = aliResponse.Usage.InputTokens
+ usage.CompletionTokens = aliResponse.Usage.OutputTokens
+ usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
+ }
+ response := streamResponseAli2OpenAI(&aliResponse)
+ if response == nil {
+ return true
+ }
+ //response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
+ //lastResponseText = aliResponse.Output.Text
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ logger.SysError("error marshalling stream response: " + err.Error())
+ return true
+ }
+ c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
+ return true
+ case <-stopChan:
+ c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
+ return false
+ }
+ })
+ err := resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ return nil, &usage
+}
-// if aliResponse.Code != "" {
-// return &OpenAIErrorWithStatusCode{
-// OpenAIError: OpenAIError{
-// Message: aliResponse.Message,
-// Type: aliResponse.Code,
-// Param: aliResponse.RequestId,
-// Code: aliResponse.Code,
-// },
-// StatusCode: resp.StatusCode,
-// }, nil
-// }
-
-// fullTextResponse := embeddingResponseAli2OpenAI(&aliResponse)
-// jsonResponse, err := json.Marshal(fullTextResponse)
-// if err != nil {
-// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// c.Writer.WriteHeader(resp.StatusCode)
-// _, err = c.Writer.Write(jsonResponse)
-// return nil, &fullTextResponse.Usage
-// }
-
-// func embeddingResponseAli2OpenAI(response *AliEmbeddingResponse) *OpenAIEmbeddingResponse {
-// openAIEmbeddingResponse := OpenAIEmbeddingResponse{
-// Object: "list",
-// Data: make([]OpenAIEmbeddingResponseItem, 0, len(response.Output.Embeddings)),
-// Model: "text-embedding-v1",
-// Usage: Usage{TotalTokens: response.Usage.TotalTokens},
-// }
-
-// for _, item := range response.Output.Embeddings {
-// openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, OpenAIEmbeddingResponseItem{
-// Object: `embedding`,
-// Index: item.TextIndex,
-// Embedding: item.Embedding,
-// })
-// }
-// return &openAIEmbeddingResponse
-// }
-
-// func responseAli2OpenAI(response *AliChatResponse) *OpenAITextResponse {
-// choice := OpenAITextResponseChoice{
-// Index: 0,
-// Message: Message{
-// Role: "assistant",
-// Content: response.Output.Text,
-// },
-// FinishReason: response.Output.FinishReason,
-// }
-// fullTextResponse := OpenAITextResponse{
-// Id: response.RequestId,
-// Object: "chat.completion",
-// Created: common.GetTimestamp(),
-// Choices: []OpenAITextResponseChoice{choice},
-// Usage: Usage{
-// PromptTokens: response.Usage.InputTokens,
-// CompletionTokens: response.Usage.OutputTokens,
-// TotalTokens: response.Usage.InputTokens + response.Usage.OutputTokens,
-// },
-// }
-// return &fullTextResponse
-// }
-
-// func streamResponseAli2OpenAI(aliResponse *AliChatResponse) *ChatCompletionsStreamResponse {
-// var choice ChatCompletionsStreamResponseChoice
-// choice.Delta.Content = aliResponse.Output.Text
-// if aliResponse.Output.FinishReason != "null" {
-// finishReason := aliResponse.Output.FinishReason
-// choice.FinishReason = &finishReason
-// }
-// response := ChatCompletionsStreamResponse{
-// Id: aliResponse.RequestId,
-// Object: "chat.completion.chunk",
-// Created: common.GetTimestamp(),
-// Model: "ernie-bot",
-// Choices: []ChatCompletionsStreamResponseChoice{choice},
-// }
-// return &response
-// }
-
-// func aliStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var usage Usage
-// scanner := bufio.NewScanner(resp.Body)
-// scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-// if atEOF && len(data) == 0 {
-// return 0, nil, nil
-// }
-// if i := strings.Index(string(data), "\n"); i >= 0 {
-// return i + 1, data[0:i], nil
-// }
-// if atEOF {
-// return len(data), data, nil
-// }
-// return 0, nil, nil
-// })
-// dataChan := make(chan string)
-// stopChan := make(chan bool)
-// go func() {
-// for scanner.Scan() {
-// data := scanner.Text()
-// if len(data) < 5 { // ignore blank line or wrong format
-// continue
-// }
-// if data[:5] != "data:" {
-// continue
-// }
-// data = data[5:]
-// dataChan <- data
-// }
-// stopChan <- true
-// }()
-// setEventStreamHeaders(c)
-// lastResponseText := ""
-// c.Stream(func(w io.Writer) bool {
-// select {
-// case data := <-dataChan:
-// var aliResponse AliChatResponse
-// err := json.Unmarshal([]byte(data), &aliResponse)
-// if err != nil {
-// common.SysError("error unmarshalling stream response: " + err.Error())
-// return true
-// }
-// if aliResponse.Usage.OutputTokens != 0 {
-// usage.PromptTokens = aliResponse.Usage.InputTokens
-// usage.CompletionTokens = aliResponse.Usage.OutputTokens
-// usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
-// }
-// response := streamResponseAli2OpenAI(&aliResponse)
-// response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
-// lastResponseText = aliResponse.Output.Text
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// common.SysError("error marshalling stream response: " + err.Error())
-// return true
-// }
-// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-// return true
-// case <-stopChan:
-// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-// return false
-// }
-// })
-// err := resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// return nil, &usage
-// }
-
-// func aliHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var aliResponse AliChatResponse
-// responseBody, err := io.ReadAll(resp.Body)
-// if err != nil {
-// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = json.Unmarshal(responseBody, &aliResponse)
-// if err != nil {
-// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// if aliResponse.Code != "" {
-// return &OpenAIErrorWithStatusCode{
-// OpenAIError: OpenAIError{
-// Message: aliResponse.Message,
-// Type: aliResponse.Code,
-// Param: aliResponse.RequestId,
-// Code: aliResponse.Code,
-// },
-// StatusCode: resp.StatusCode,
-// }, nil
-// }
-// fullTextResponse := responseAli2OpenAI(&aliResponse)
-// jsonResponse, err := json.Marshal(fullTextResponse)
-// if err != nil {
-// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// c.Writer.WriteHeader(resp.StatusCode)
-// _, err = c.Writer.Write(jsonResponse)
-// return nil, &fullTextResponse.Usage
-// }
+func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ ctx := c.Request.Context()
+ var aliResponse ChatResponse
+ responseBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ logger.Debugf(ctx, "response body: %s\n", responseBody)
+ err = json.Unmarshal(responseBody, &aliResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ if aliResponse.Code != "" {
+ return &model.ErrorWithStatusCode{
+ Error: model.Error{
+ Message: aliResponse.Message,
+ Type: aliResponse.Code,
+ Param: aliResponse.RequestId,
+ Code: aliResponse.Code,
+ },
+ StatusCode: resp.StatusCode,
+ }, nil
+ }
+ fullTextResponse := responseAli2OpenAI(&aliResponse)
+ fullTextResponse.Model = "qwen"
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+ _, err = c.Writer.Write(jsonResponse)
+ return nil, &fullTextResponse.Usage
+}
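
Beyond the type renames (`AliChatRequest` becomes `ChatRequest`, and so on), the restored converter adds the `-internet` model suffix: it is stripped from the model name and turns on DashScope's `enable_search` flag. A hedged sketch of that mapping, using only fields shown in `ConvertRequest` above:

```go
package ali_test

import (
	"fmt"

	"github.com/Laisky/one-api/relay/adaptor/ali"
	"github.com/Laisky/one-api/relay/model"
)

func ExampleConvertRequest() {
	req := model.GeneralOpenAIRequest{
		Model: "qwen-plus-internet", // "-internet" suffix requests web search
		Messages: []model.Message{
			{Role: "user", Content: "hello"},
		},
	}
	aliReq := ali.ConvertRequest(req)
	// The suffix is trimmed and EnableSearch is set on the DashScope request.
	fmt.Println(aliReq.Model, aliReq.Parameters.EnableSearch)
	// Output: qwen-plus true
}
```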
diff --git a/relay/adaptor/ali/model.go b/relay/adaptor/ali/model.go
index 450b5f52..b247b434 100644
--- a/relay/adaptor/ali/model.go
+++ b/relay/adaptor/ali/model.go
@@ -1,8 +1,8 @@
package ali
import (
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/model"
)
type Message struct {
diff --git a/relay/adaptor/anthropic/adaptor.go b/relay/adaptor/anthropic/adaptor.go
index 07efb3c7..dfb42f2e 100644
--- a/relay/adaptor/anthropic/adaptor.go
+++ b/relay/adaptor/anthropic/adaptor.go
@@ -6,10 +6,10 @@ import (
"net/http"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
)
type Adaptor struct {
diff --git a/relay/adaptor/anthropic/main.go b/relay/adaptor/anthropic/main.go
index 79e55437..e366c993 100644
--- a/relay/adaptor/anthropic/main.go
+++ b/relay/adaptor/anthropic/main.go
@@ -8,13 +8,13 @@ import (
"net/http"
"strings"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/image"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/image"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/model"
)
func stopReasonClaude2OpenAI(reason *string) string {
diff --git a/relay/adaptor/aws/adapter.go b/relay/adaptor/aws/adapter.go
index 15767ceb..5d58d0cc 100644
--- a/relay/adaptor/aws/adapter.go
+++ b/relay/adaptor/aws/adapter.go
@@ -4,13 +4,13 @@ import (
"io"
"net/http"
- "github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/ctxkey"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/anthropic"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/pkg/errors"
)
var _ adaptor.Adaptor = new(Adaptor)
@@ -36,9 +36,8 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
}
claudeReq := anthropic.ConvertRequest(*request)
- c.Set(common.CtxKeyRequestModel, request.Model)
- c.Set(common.CtxKeyRawRequest, request)
- c.Set(common.CtxKeyConvertedRequest, claudeReq)
+ c.Set(ctxkey.RequestModel, request.Model)
+ c.Set(ctxkey.ConvertedRequest, claudeReq)
return claudeReq, nil
}
diff --git a/relay/adaptor/aws/main.go b/relay/adaptor/aws/main.go
index 3e3b7804..dc94572d 100644
--- a/relay/adaptor/aws/main.go
+++ b/relay/adaptor/aws/main.go
@@ -7,8 +7,14 @@ import (
"fmt"
"io"
"net/http"
- "strings"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/ctxkey"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/anthropic"
+ relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/credentials"
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
@@ -16,23 +22,14 @@ import (
"github.com/gin-gonic/gin"
"github.com/jinzhu/copier"
"github.com/pkg/errors"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
)
-func newAwsClient(channel *model.Channel) (*bedrockruntime.Client, error) {
- ks := strings.Split(channel.Key, "\n")
- if len(ks) != 2 {
- return nil, errors.New("invalid key")
- }
- ak, sk := ks[0], ks[1]
-
+func newAwsClient(c *gin.Context) (*bedrockruntime.Client, error) {
+ ak := c.GetString(config.KeyAK)
+ sk := c.GetString(config.KeySK)
+ region := c.GetString(config.KeyRegion)
client := bedrockruntime.New(bedrockruntime.Options{
- Region: *channel.BaseURL,
+ Region: region,
Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),
})
@@ -43,7 +40,7 @@ func wrapErr(err error) *relaymodel.ErrorWithStatusCode {
return &relaymodel.ErrorWithStatusCode{
StatusCode: http.StatusInternalServerError,
Error: relaymodel.Error{
- Message: fmt.Sprintf("%+v", err),
+ Message: fmt.Sprintf("%s", err.Error()),
},
}
}
@@ -67,19 +64,12 @@ func awsModelID(requestModel string) (string, error) {
}
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
- var channel *model.Channel
- if channeli, ok := c.Get(common.CtxKeyChannel); !ok {
- return wrapErr(errors.New("channel not found")), nil
- } else {
- channel = channeli.(*model.Channel)
- }
-
- awsCli, err := newAwsClient(channel)
+ awsCli, err := newAwsClient(c)
if err != nil {
return wrapErr(errors.Wrap(err, "newAwsClient")), nil
}
- awsModelId, err := awsModelID(c.GetString(common.CtxKeyRequestModel))
+ awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
if err != nil {
return wrapErr(errors.Wrap(err, "awsModelID")), nil
}
@@ -90,11 +80,11 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
ContentType: aws.String("application/json"),
}
- claudeReqi, ok := c.Get(common.CtxKeyConvertedRequest)
+ claudeReq_, ok := c.Get(ctxkey.ConvertedRequest)
if !ok {
return wrapErr(errors.New("request not found")), nil
}
- claudeReq := claudeReqi.(*anthropic.Request)
+ claudeReq := claudeReq_.(*anthropic.Request)
awsClaudeReq := &Request{
AnthropicVersion: "bedrock-2023-05-31",
}
@@ -133,20 +123,12 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
func StreamHandler(c *gin.Context, resp *http.Response) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
createdTime := helper.GetTimestamp()
-
- var channel *model.Channel
- if channeli, ok := c.Get(common.CtxKeyChannel); !ok {
- return wrapErr(errors.New("channel not found")), nil
- } else {
- channel = channeli.(*model.Channel)
- }
-
- awsCli, err := newAwsClient(channel)
+ awsCli, err := newAwsClient(c)
if err != nil {
return wrapErr(errors.Wrap(err, "newAwsClient")), nil
}
- awsModelId, err := awsModelID(c.GetString(common.CtxKeyRequestModel))
+ awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
if err != nil {
return wrapErr(errors.Wrap(err, "awsModelID")), nil
}
@@ -157,11 +139,11 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*relaymodel.ErrorWithSt
ContentType: aws.String("application/json"),
}
- claudeReqi, ok := c.Get(common.CtxKeyConvertedRequest)
+ claudeReq_, ok := c.Get(ctxkey.ConvertedRequest)
if !ok {
return wrapErr(errors.New("request not found")), nil
}
- claudeReq := claudeReqi.(*anthropic.Request)
+ claudeReq := claudeReq_.(*anthropic.Request)
awsClaudeReq := &Request{
AnthropicVersion: "bedrock-2023-05-31",
@@ -211,7 +193,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*relaymodel.ErrorWithSt
return true
}
response.Id = id
- response.Model = c.GetString(common.CtxKeyOriginModel)
+ response.Model = c.GetString(ctxkey.OriginalModel)
response.Created = createdTime
jsonStr, err := json.Marshal(response)
if err != nil {
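
The Bedrock client no longer splits `channel.Key` for credentials; it reads the access key, secret, and region from the gin context under `config.KeyAK`, `config.KeySK`, and `config.KeyRegion`. Where those values get populated is outside this hunk, so the setter below is purely illustrative and assumes the keys are plain string constants, as their use with `c.GetString` suggests:

```go
package aws_test

import (
	"github.com/Laisky/one-api/common/config"
	"github.com/gin-gonic/gin"
)

// setAwsCredentials is a hypothetical helper showing the context keys that
// newAwsClient reads; in the real flow they are expected to be set during
// channel setup, which this diff does not show.
func setAwsCredentials(c *gin.Context, ak, sk, region string) {
	c.Set(config.KeyAK, ak)
	c.Set(config.KeySK, sk)
	c.Set(config.KeyRegion, region)
}
```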
diff --git a/relay/adaptor/aws/model.go b/relay/adaptor/aws/model.go
index bcbfb584..9428cc02 100644
--- a/relay/adaptor/aws/model.go
+++ b/relay/adaptor/aws/model.go
@@ -1,6 +1,6 @@
package aws
-import "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
+import "github.com/Laisky/one-api/relay/adaptor/anthropic"
// Request is the request to AWS Claude
//
diff --git a/relay/adaptor/azure/helper.go b/relay/adaptor/azure/helper.go
index dd207f37..b4b54379 100644
--- a/relay/adaptor/azure/helper.go
+++ b/relay/adaptor/azure/helper.go
@@ -1,8 +1,8 @@
package azure
import (
+ "github.com/Laisky/one-api/common/config"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
)
func GetAPIVersion(c *gin.Context) string {
diff --git a/relay/adaptor/baidu/adaptor.go b/relay/adaptor/baidu/adaptor.go
index 7d756d35..a7e2b47a 100644
--- a/relay/adaptor/baidu/adaptor.go
+++ b/relay/adaptor/baidu/adaptor.go
@@ -1,93 +1,143 @@
package baidu
-// import (
-// "github.com/Laisky/errors/v2"
-// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/relay/channel"
-// "github.com/songquanpeng/one-api/relay/constant"
-// "github.com/songquanpeng/one-api/relay/model"
-// "github.com/songquanpeng/one-api/relay/util"
-// "io"
-// "net/http"
-// )
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
-// // type Adaptor struct {
-// // }
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
+ "github.com/gin-gonic/gin"
+)
-// func (a *Adaptor) Init(meta *util.RelayMeta) {
+type Adaptor struct {
+}
-// }
+func (a *Adaptor) Init(meta *meta.Meta) {
-// func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
-// // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t
-// var fullRequestURL string
-// switch meta.ActualModelName {
-// case "ERNIE-Bot-4":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro"
-// case "ERNIE-Bot-8K":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_bot_8k"
-// case "ERNIE-Bot":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
-// case "ERNIE-Speed":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed"
-// case "ERNIE-Bot-turbo":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
-// case "BLOOMZ-7B":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1"
-// case "Embedding-V1":
-// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1"
-// }
-// var accessToken string
-// var err error
-// if accessToken, err = GetAccessToken(meta.APIKey); err != nil {
-// return "", err
-// }
-// fullRequestURL += "?access_token=" + accessToken
-// return fullRequestURL, nil
-// }
+}
-// func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *util.RelayMeta) error {
-// channel.SetupCommonRequestHeader(c, req, meta)
-// req.Header.Set("Authorization", "Bearer "+meta.APIKey)
-// return nil
-// }
+func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
+ // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t
+ suffix := "chat/"
+ if strings.HasPrefix(meta.ActualModelName, "Embedding") {
+ suffix = "embeddings/"
+ }
+ if strings.HasPrefix(meta.ActualModelName, "bge-large") {
+ suffix = "embeddings/"
+ }
+ if strings.HasPrefix(meta.ActualModelName, "tao-8k") {
+ suffix = "embeddings/"
+ }
+ switch meta.ActualModelName {
+ case "ERNIE-4.0":
+ suffix += "completions_pro"
+ case "ERNIE-Bot-4":
+ suffix += "completions_pro"
+ case "ERNIE-Bot":
+ suffix += "completions"
+ case "ERNIE-Bot-turbo":
+ suffix += "eb-instant"
+ case "ERNIE-Speed":
+ suffix += "ernie_speed"
+ case "ERNIE-4.0-8K":
+ suffix += "completions_pro"
+ case "ERNIE-3.5-8K":
+ suffix += "completions"
+ case "ERNIE-3.5-8K-0205":
+ suffix += "ernie-3.5-8k-0205"
+ case "ERNIE-3.5-8K-1222":
+ suffix += "ernie-3.5-8k-1222"
+ case "ERNIE-Bot-8K":
+ suffix += "ernie_bot_8k"
+ case "ERNIE-3.5-4K-0205":
+ suffix += "ernie-3.5-4k-0205"
+ case "ERNIE-Speed-8K":
+ suffix += "ernie_speed"
+ case "ERNIE-Speed-128K":
+ suffix += "ernie-speed-128k"
+ case "ERNIE-Lite-8K-0922":
+ suffix += "eb-instant"
+ case "ERNIE-Lite-8K-0308":
+ suffix += "ernie-lite-8k"
+ case "ERNIE-Tiny-8K":
+ suffix += "ernie-tiny-8k"
+ case "BLOOMZ-7B":
+ suffix += "bloomz_7b1"
+ case "Embedding-V1":
+ suffix += "embedding-v1"
+ case "bge-large-zh":
+ suffix += "bge_large_zh"
+ case "bge-large-en":
+ suffix += "bge_large_en"
+ case "tao-8k":
+ suffix += "tao_8k"
+ default:
+ suffix += strings.ToLower(meta.ActualModelName)
+ }
+ fullRequestURL := fmt.Sprintf("%s/rpc/2.0/ai_custom/v1/wenxinworkshop/%s", meta.BaseURL, suffix)
+ var accessToken string
+ var err error
+ if accessToken, err = GetAccessToken(meta.APIKey); err != nil {
+ return "", err
+ }
+ fullRequestURL += "?access_token=" + accessToken
+ return fullRequestURL, nil
+}
-// func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
-// switch relayMode {
-// case constant.RelayModeEmbeddings:
-// baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
-// return baiduEmbeddingRequest, nil
-// default:
-// baiduRequest := ConvertRequest(*request)
-// return baiduRequest, nil
-// }
-// }
+func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+ adaptor.SetupCommonRequestHeader(c, req, meta)
+ req.Header.Set("Authorization", "Bearer "+meta.APIKey)
+ return nil
+}
-// func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
-// return channel.DoRequestHelper(a, c, meta, requestBody)
-// }
+func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ switch relayMode {
+ case relaymode.Embeddings:
+ baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
+ return baiduEmbeddingRequest, nil
+ default:
+ baiduRequest := ConvertRequest(*request)
+ return baiduRequest, nil
+ }
+}
-// func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.RelayMeta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-// if meta.IsStream {
-// err, usage = StreamHandler(c, resp)
-// } else {
-// switch meta.Mode {
-// case constant.RelayModeEmbeddings:
-// err, usage = EmbeddingHandler(c, resp)
-// default:
-// err, usage = Handler(c, resp)
-// }
-// }
-// return
-// }
+func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ return request, nil
+}
-// func (a *Adaptor) GetModelList() []string {
-// return ModelList
-// }
+func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+ return adaptor.DoRequestHelper(a, c, meta, requestBody)
+}
-// func (a *Adaptor) GetChannelName() string {
-// return "baidu"
-// }
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+ if meta.IsStream {
+ err, usage = StreamHandler(c, resp)
+ } else {
+ switch meta.Mode {
+ case relaymode.Embeddings:
+ err, usage = EmbeddingHandler(c, resp)
+ default:
+ err, usage = Handler(c, resp)
+ }
+ }
+ return
+}
+
+func (a *Adaptor) GetModelList() []string {
+ return ModelList
+}
+
+func (a *Adaptor) GetChannelName() string {
+ return "baidu"
+}
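
The restored Baidu adaptor builds the Qianfan URL from a per-model endpoint suffix and appends an access token fetched via `GetAccessToken(meta.APIKey)`. Fetching a real token needs live credentials, so the sketch below only prints the URL shape for one of the mappings above, with a placeholder token:

```go
package baidu_test

import "fmt"

func Example_requestURLShape() {
	base := "https://aip.baidubce.com" // typical meta.BaseURL for Baidu
	suffix := "chat/completions_pro"   // ERNIE-4.0-8K maps to completions_pro above
	token := "TOKEN"                   // placeholder for GetAccessToken(meta.APIKey)
	fmt.Printf("%s/rpc/2.0/ai_custom/v1/wenxinworkshop/%s?access_token=%s\n", base, suffix, token)
	// Output: https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro?access_token=TOKEN
}
```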
diff --git a/relay/adaptor/baidu/main.go b/relay/adaptor/baidu/main.go
index e8ff7588..2f949f7b 100644
--- a/relay/adaptor/baidu/main.go
+++ b/relay/adaptor/baidu/main.go
@@ -1,359 +1,329 @@
package baidu
-// import (
-// "bufio"
-// "encoding/json"
-// "github.com/Laisky/errors/v2"
-// "fmt"
-// "github.com/gin-gonic/gin"
-// "io"
-// "net/http"
-// "one-api/common"
-// "strings"
-// "sync"
-// "time"
-// )
+import (
+ "bufio"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
+ "sync"
+ "time"
-// // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/client"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+)
-// type BaiduTokenResponse struct {
-// ExpiresIn int `json:"expires_in"`
-// AccessToken string `json:"access_token"`
-// }
+// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2
-// type BaiduMessage struct {
-// Role string `json:"role"`
-// Content string `json:"content"`
-// }
+type TokenResponse struct {
+ ExpiresIn int `json:"expires_in"`
+ AccessToken string `json:"access_token"`
+}
-// type BaiduChatRequest struct {
-// Messages []BaiduMessage `json:"messages"`
-// Stream bool `json:"stream"`
-// UserId string `json:"user_id,omitempty"`
-// }
+type Message struct {
+ Role string `json:"role"`
+ Content string `json:"content"`
+}
-// type BaiduError struct {
-// ErrorCode int `json:"error_code"`
-// ErrorMsg string `json:"error_msg"`
-// }
+type ChatRequest struct {
+ Messages []Message `json:"messages"`
+ Temperature float64 `json:"temperature,omitempty"`
+ TopP float64 `json:"top_p,omitempty"`
+ PenaltyScore float64 `json:"penalty_score,omitempty"`
+ Stream bool `json:"stream,omitempty"`
+ System string `json:"system,omitempty"`
+ DisableSearch bool `json:"disable_search,omitempty"`
+ EnableCitation bool `json:"enable_citation,omitempty"`
+ MaxOutputTokens int `json:"max_output_tokens,omitempty"`
+ UserId string `json:"user_id,omitempty"`
+}
-// type BaiduChatResponse struct {
-// Id string `json:"id"`
-// Object string `json:"object"`
-// Created int64 `json:"created"`
-// Result string `json:"result"`
-// IsTruncated bool `json:"is_truncated"`
-// NeedClearHistory bool `json:"need_clear_history"`
-// Usage Usage `json:"usage"`
-// BaiduError
-// }
+type Error struct {
+ ErrorCode int `json:"error_code"`
+ ErrorMsg string `json:"error_msg"`
+}
-// type BaiduChatStreamResponse struct {
-// BaiduChatResponse
-// SentenceId int `json:"sentence_id"`
-// IsEnd bool `json:"is_end"`
-// }
+var baiduTokenStore sync.Map
-// type BaiduEmbeddingRequest struct {
-// Input []string `json:"input"`
-// }
+func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
+ baiduRequest := ChatRequest{
+ Messages: make([]Message, 0, len(request.Messages)),
+ Temperature: request.Temperature,
+ TopP: request.TopP,
+ PenaltyScore: request.FrequencyPenalty,
+ Stream: request.Stream,
+ DisableSearch: false,
+ EnableCitation: false,
+ MaxOutputTokens: request.MaxTokens,
+ UserId: request.User,
+ }
+ for _, message := range request.Messages {
+ if message.Role == "system" {
+ baiduRequest.System = message.StringContent()
+ } else {
+ baiduRequest.Messages = append(baiduRequest.Messages, Message{
+ Role: message.Role,
+ Content: message.StringContent(),
+ })
+ }
+ }
+ return &baiduRequest
+}
-// type BaiduEmbeddingData struct {
-// Object string `json:"object"`
-// Embedding []float64 `json:"embedding"`
-// Index int `json:"index"`
-// }
+func responseBaidu2OpenAI(response *ChatResponse) *openai.TextResponse {
+ choice := openai.TextResponseChoice{
+ Index: 0,
+ Message: model.Message{
+ Role: "assistant",
+ Content: response.Result,
+ },
+ FinishReason: "stop",
+ }
+ fullTextResponse := openai.TextResponse{
+ Id: response.Id,
+ Object: "chat.completion",
+ Created: response.Created,
+ Choices: []openai.TextResponseChoice{choice},
+ Usage: response.Usage,
+ }
+ return &fullTextResponse
+}
-// type BaiduEmbeddingResponse struct {
-// Id string `json:"id"`
-// Object string `json:"object"`
-// Created int64 `json:"created"`
-// Data []BaiduEmbeddingData `json:"data"`
-// Usage Usage `json:"usage"`
-// BaiduError
-// }
+func streamResponseBaidu2OpenAI(baiduResponse *ChatStreamResponse) *openai.ChatCompletionsStreamResponse {
+ var choice openai.ChatCompletionsStreamResponseChoice
+ choice.Delta.Content = baiduResponse.Result
+ if baiduResponse.IsEnd {
+ choice.FinishReason = &constant.StopFinishReason
+ }
+ response := openai.ChatCompletionsStreamResponse{
+ Id: baiduResponse.Id,
+ Object: "chat.completion.chunk",
+ Created: baiduResponse.Created,
+ Model: "ernie-bot",
+ Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
+ }
+ return &response
+}
-// type BaiduAccessToken struct {
-// AccessToken string `json:"access_token"`
-// Error string `json:"error,omitempty"`
-// ErrorDescription string `json:"error_description,omitempty"`
-// ExpiresIn int64 `json:"expires_in,omitempty"`
-// ExpiresAt time.Time `json:"-"`
-// }
+func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
+ return &EmbeddingRequest{
+ Input: request.ParseInput(),
+ }
+}
-// var baiduTokenStore sync.Map
+func embeddingResponseBaidu2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
+ openAIEmbeddingResponse := openai.EmbeddingResponse{
+ Object: "list",
+ Data: make([]openai.EmbeddingResponseItem, 0, len(response.Data)),
+ Model: "baidu-embedding",
+ Usage: response.Usage,
+ }
+ for _, item := range response.Data {
+ openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
+ Object: item.Object,
+ Index: item.Index,
+ Embedding: item.Embedding,
+ })
+ }
+ return &openAIEmbeddingResponse
+}
-// func requestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduChatRequest {
-// messages := make([]BaiduMessage, 0, len(request.Messages))
-// for _, message := range request.Messages {
-// if message.Role == "system" {
-// messages = append(messages, BaiduMessage{
-// Role: "user",
-// Content: message.Content,
-// })
-// messages = append(messages, BaiduMessage{
-// Role: "assistant",
-// Content: "Okay",
-// })
-// } else {
-// messages = append(messages, BaiduMessage{
-// Role: message.Role,
-// Content: message.Content,
-// })
-// }
-// }
-// return &BaiduChatRequest{
-// Messages: messages,
-// Stream: request.Stream,
-// }
-// }
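+// StreamHandler relays Baidu's SSE chat stream, converting each "data:" chunk into an
+// OpenAI-compatible stream response and accumulating token usage from the chunks.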
+func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var usage model.Usage
+ scanner := bufio.NewScanner(resp.Body)
+ scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := strings.Index(string(data), "\n"); i >= 0 {
+ return i + 1, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ })
+ dataChan := make(chan string)
+ stopChan := make(chan bool)
+ go func() {
+ for scanner.Scan() {
+ data := scanner.Text()
+ if len(data) < 6 { // ignore blank line or wrong format
+ continue
+ }
+ data = data[6:]
+ dataChan <- data
+ }
+ stopChan <- true
+ }()
+ common.SetEventStreamHeaders(c)
+ c.Stream(func(w io.Writer) bool {
+ select {
+ case data := <-dataChan:
+ var baiduResponse ChatStreamResponse
+ err := json.Unmarshal([]byte(data), &baiduResponse)
+ if err != nil {
+ logger.SysError("error unmarshalling stream response: " + err.Error())
+ return true
+ }
+ if baiduResponse.Usage.TotalTokens != 0 {
+ usage.TotalTokens = baiduResponse.Usage.TotalTokens
+ usage.PromptTokens = baiduResponse.Usage.PromptTokens
+ usage.CompletionTokens = baiduResponse.Usage.TotalTokens - baiduResponse.Usage.PromptTokens
+ }
+ response := streamResponseBaidu2OpenAI(&baiduResponse)
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ logger.SysError("error marshalling stream response: " + err.Error())
+ return true
+ }
+ c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
+ return true
+ case <-stopChan:
+ c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
+ return false
+ }
+ })
+ err := resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ return nil, &usage
+}
-// func responseBaidu2OpenAI(response *BaiduChatResponse) *OpenAITextResponse {
-// choice := OpenAITextResponseChoice{
-// Index: 0,
-// Message: Message{
-// Role: "assistant",
-// Content: response.Result,
-// },
-// FinishReason: "stop",
-// }
-// fullTextResponse := OpenAITextResponse{
-// Id: response.Id,
-// Object: "chat.completion",
-// Created: response.Created,
-// Choices: []OpenAITextResponseChoice{choice},
-// Usage: response.Usage,
-// }
-// return &fullTextResponse
-// }
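+// Handler converts a non-streaming Baidu chat completion response into the OpenAI format
+// and writes it back to the client.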
+func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var baiduResponse ChatResponse
+ responseBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = json.Unmarshal(responseBody, &baiduResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ if baiduResponse.ErrorMsg != "" {
+ return &model.ErrorWithStatusCode{
+ Error: model.Error{
+ Message: baiduResponse.ErrorMsg,
+ Type: "baidu_error",
+ Param: "",
+ Code: baiduResponse.ErrorCode,
+ },
+ StatusCode: resp.StatusCode,
+ }, nil
+ }
+ fullTextResponse := responseBaidu2OpenAI(&baiduResponse)
+ fullTextResponse.Model = "ernie-bot"
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(jsonResponse)
+	if err != nil {
+		return openai.ErrorWrapper(err, "write_response_body_failed", http.StatusInternalServerError), nil
+	}
+	return nil, &fullTextResponse.Usage
+}
-// func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *ChatCompletionsStreamResponse {
-// var choice ChatCompletionsStreamResponseChoice
-// choice.Delta.Content = baiduResponse.Result
-// if baiduResponse.IsEnd {
-// choice.FinishReason = &stopFinishReason
-// }
-// response := ChatCompletionsStreamResponse{
-// Id: baiduResponse.Id,
-// Object: "chat.completion.chunk",
-// Created: baiduResponse.Created,
-// Model: "ernie-bot",
-// Choices: []ChatCompletionsStreamResponseChoice{choice},
-// }
-// return &response
-// }
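+// EmbeddingHandler converts a Baidu embedding response into the OpenAI embedding format
+// and writes it back to the client.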
+func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var baiduResponse EmbeddingResponse
+ responseBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = json.Unmarshal(responseBody, &baiduResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ if baiduResponse.ErrorMsg != "" {
+ return &model.ErrorWithStatusCode{
+ Error: model.Error{
+ Message: baiduResponse.ErrorMsg,
+ Type: "baidu_error",
+ Param: "",
+ Code: baiduResponse.ErrorCode,
+ },
+ StatusCode: resp.StatusCode,
+ }, nil
+ }
+ fullTextResponse := embeddingResponseBaidu2OpenAI(&baiduResponse)
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(jsonResponse)
+	if err != nil {
+		return openai.ErrorWrapper(err, "write_response_body_failed", http.StatusInternalServerError), nil
+	}
+	return nil, &fullTextResponse.Usage
+}
-// func embeddingRequestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduEmbeddingRequest {
-// return &BaiduEmbeddingRequest{
-// Input: request.ParseInput(),
-// }
-// }
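+// GetAccessToken returns a cached Baidu access token for the given API key, refreshing it
+// in the background when it is about to expire.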
+func GetAccessToken(apiKey string) (string, error) {
+ if val, ok := baiduTokenStore.Load(apiKey); ok {
+ var accessToken AccessToken
+ if accessToken, ok = val.(AccessToken); ok {
+			// the cached token expires within the next hour; refresh it in the background
+ if time.Now().Add(time.Hour).After(accessToken.ExpiresAt) {
+ go func() {
+ _, _ = getBaiduAccessTokenHelper(apiKey)
+ }()
+ }
+ return accessToken.AccessToken, nil
+ }
+ }
+ accessToken, err := getBaiduAccessTokenHelper(apiKey)
+ if err != nil {
+ return "", err
+ }
+ if accessToken == nil {
+		return "", errors.New("GetAccessToken returned a nil token")
+ }
+ return (*accessToken).AccessToken, nil
+}
-// func embeddingResponseBaidu2OpenAI(response *BaiduEmbeddingResponse) *OpenAIEmbeddingResponse {
-// openAIEmbeddingResponse := OpenAIEmbeddingResponse{
-// Object: "list",
-// Data: make([]OpenAIEmbeddingResponseItem, 0, len(response.Data)),
-// Model: "baidu-embedding",
-// Usage: response.Usage,
-// }
-// for _, item := range response.Data {
-// openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, OpenAIEmbeddingResponseItem{
-// Object: item.Object,
-// Index: item.Index,
-// Embedding: item.Embedding,
-// })
-// }
-// return &openAIEmbeddingResponse
-// }
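+// getBaiduAccessTokenHelper exchanges an API key of the form "client_id|client_secret" for
+// an OAuth access token and stores it in the token cache.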
+func getBaiduAccessTokenHelper(apiKey string) (*AccessToken, error) {
+ parts := strings.Split(apiKey, "|")
+ if len(parts) != 2 {
+ return nil, errors.New("invalid baidu apikey")
+ }
+ req, err := http.NewRequest("POST", fmt.Sprintf("https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=%s&client_secret=%s",
+ parts[0], parts[1]), nil)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Add("Content-Type", "application/json")
+ req.Header.Add("Accept", "application/json")
+ res, err := client.ImpatientHTTPClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer res.Body.Close()
-// func baiduStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var usage Usage
-// scanner := bufio.NewScanner(resp.Body)
-// scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-// if atEOF && len(data) == 0 {
-// return 0, nil, nil
-// }
-// if i := strings.Index(string(data), "\n"); i >= 0 {
-// return i + 1, data[0:i], nil
-// }
-// if atEOF {
-// return len(data), data, nil
-// }
-// return 0, nil, nil
-// })
-// dataChan := make(chan string)
-// stopChan := make(chan bool)
-// go func() {
-// for scanner.Scan() {
-// data := scanner.Text()
-// if len(data) < 6 { // ignore blank line or wrong format
-// continue
-// }
-// data = data[6:]
-// dataChan <- data
-// }
-// stopChan <- true
-// }()
-// setEventStreamHeaders(c)
-// c.Stream(func(w io.Writer) bool {
-// select {
-// case data := <-dataChan:
-// var baiduResponse BaiduChatStreamResponse
-// err := json.Unmarshal([]byte(data), &baiduResponse)
-// if err != nil {
-// common.SysError("error unmarshalling stream response: " + err.Error())
-// return true
-// }
-// if baiduResponse.Usage.TotalTokens != 0 {
-// usage.TotalTokens = baiduResponse.Usage.TotalTokens
-// usage.PromptTokens = baiduResponse.Usage.PromptTokens
-// usage.CompletionTokens = baiduResponse.Usage.TotalTokens - baiduResponse.Usage.PromptTokens
-// }
-// response := streamResponseBaidu2OpenAI(&baiduResponse)
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// common.SysError("error marshalling stream response: " + err.Error())
-// return true
-// }
-// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-// return true
-// case <-stopChan:
-// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-// return false
-// }
-// })
-// err := resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// return nil, &usage
-// }
-
-// func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var baiduResponse BaiduChatResponse
-// responseBody, err := io.ReadAll(resp.Body)
-// if err != nil {
-// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = json.Unmarshal(responseBody, &baiduResponse)
-// if err != nil {
-// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// if baiduResponse.ErrorMsg != "" {
-// return &OpenAIErrorWithStatusCode{
-// OpenAIError: OpenAIError{
-// Message: baiduResponse.ErrorMsg,
-// Type: "baidu_error",
-// Param: "",
-// Code: baiduResponse.ErrorCode,
-// },
-// StatusCode: resp.StatusCode,
-// }, nil
-// }
-// fullTextResponse := responseBaidu2OpenAI(&baiduResponse)
-// jsonResponse, err := json.Marshal(fullTextResponse)
-// if err != nil {
-// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// c.Writer.WriteHeader(resp.StatusCode)
-// _, err = c.Writer.Write(jsonResponse)
-// return nil, &fullTextResponse.Usage
-// }
-
-// func baiduEmbeddingHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var baiduResponse BaiduEmbeddingResponse
-// responseBody, err := io.ReadAll(resp.Body)
-// if err != nil {
-// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = json.Unmarshal(responseBody, &baiduResponse)
-// if err != nil {
-// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// if baiduResponse.ErrorMsg != "" {
-// return &OpenAIErrorWithStatusCode{
-// OpenAIError: OpenAIError{
-// Message: baiduResponse.ErrorMsg,
-// Type: "baidu_error",
-// Param: "",
-// Code: baiduResponse.ErrorCode,
-// },
-// StatusCode: resp.StatusCode,
-// }, nil
-// }
-// fullTextResponse := embeddingResponseBaidu2OpenAI(&baiduResponse)
-// jsonResponse, err := json.Marshal(fullTextResponse)
-// if err != nil {
-// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// c.Writer.WriteHeader(resp.StatusCode)
-// _, err = c.Writer.Write(jsonResponse)
-// return nil, &fullTextResponse.Usage
-// }
-
-// func getBaiduAccessToken(apiKey string) (string, error) {
-// if val, ok := baiduTokenStore.Load(apiKey); ok {
-// var accessToken BaiduAccessToken
-// if accessToken, ok = val.(BaiduAccessToken); ok {
-// // soon this will expire
-// if time.Now().Add(time.Hour).After(accessToken.ExpiresAt) {
-// go func() {
-// _, _ = getBaiduAccessTokenHelper(apiKey)
-// }()
-// }
-// return accessToken.AccessToken, nil
-// }
-// }
-// accessToken, err := getBaiduAccessTokenHelper(apiKey)
-// if err != nil {
-// return "", err
-// }
-// if accessToken == nil {
-// return "", errors.New("getBaiduAccessToken return a nil token")
-// }
-// return (*accessToken).AccessToken, nil
-// }
-
-// func getBaiduAccessTokenHelper(apiKey string) (*BaiduAccessToken, error) {
-// parts := strings.Split(apiKey, "|")
-// if len(parts) != 2 {
-// return nil, errors.New("invalid baidu apikey")
-// }
-// req, err := http.NewRequest("POST", fmt.Sprintf("https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=%s&client_secret=%s",
-// parts[0], parts[1]), nil)
-// if err != nil {
-// return nil, err
-// }
-// req.Header.Add("Content-Type", "application/json")
-// req.Header.Add("Accept", "application/json")
-// res, err := impatientHTTPClient.Do(req)
-// if err != nil {
-// return nil, err
-// }
-// defer res.Body.Close()
-
-// var accessToken BaiduAccessToken
-// err = json.NewDecoder(res.Body).Decode(&accessToken)
-// if err != nil {
-// return nil, err
-// }
-// if accessToken.Error != "" {
-// return nil, errors.New(accessToken.Error + ": " + accessToken.ErrorDescription)
-// }
-// if accessToken.AccessToken == "" {
-// return nil, errors.New("getBaiduAccessTokenHelper get empty access token")
-// }
-// accessToken.ExpiresAt = time.Now().Add(time.Duration(accessToken.ExpiresIn) * time.Second)
-// baiduTokenStore.Store(apiKey, accessToken)
-// return &accessToken, nil
-// }
+ var accessToken AccessToken
+ err = json.NewDecoder(res.Body).Decode(&accessToken)
+ if err != nil {
+ return nil, err
+ }
+ if accessToken.Error != "" {
+ return nil, errors.New(accessToken.Error + ": " + accessToken.ErrorDescription)
+ }
+ if accessToken.AccessToken == "" {
+		return nil, errors.New("getBaiduAccessTokenHelper got an empty access token")
+ }
+ accessToken.ExpiresAt = time.Now().Add(time.Duration(accessToken.ExpiresIn) * time.Second)
+ baiduTokenStore.Store(apiKey, accessToken)
+ return &accessToken, nil
+}
diff --git a/relay/adaptor/baidu/model.go b/relay/adaptor/baidu/model.go
index 14008c40..a9ab8ece 100644
--- a/relay/adaptor/baidu/model.go
+++ b/relay/adaptor/baidu/model.go
@@ -1,50 +1,51 @@
package baidu
-// import (
-// "github.com/songquanpeng/one-api/relay/channel/openai"
-// "time"
-// )
+import (
+ "time"
-// type ChatResponse struct {
-// Id string `json:"id"`
-// Object string `json:"object"`
-// Created int64 `json:"created"`
-// Result string `json:"result"`
-// IsTruncated bool `json:"is_truncated"`
-// NeedClearHistory bool `json:"need_clear_history"`
-// Usage openai.Usage `json:"usage"`
-// Error
-// }
+ "github.com/Laisky/one-api/relay/model"
+)
-// type ChatStreamResponse struct {
-// ChatResponse
-// SentenceId int `json:"sentence_id"`
-// IsEnd bool `json:"is_end"`
-// }
+type ChatResponse struct {
+ Id string `json:"id"`
+ Object string `json:"object"`
+ Created int64 `json:"created"`
+ Result string `json:"result"`
+ IsTruncated bool `json:"is_truncated"`
+ NeedClearHistory bool `json:"need_clear_history"`
+ Usage model.Usage `json:"usage"`
+ Error
+}
-// type EmbeddingRequest struct {
-// Input []string `json:"input"`
-// }
+type ChatStreamResponse struct {
+ ChatResponse
+ SentenceId int `json:"sentence_id"`
+ IsEnd bool `json:"is_end"`
+}
-// type EmbeddingData struct {
-// Object string `json:"object"`
-// Embedding []float64 `json:"embedding"`
-// Index int `json:"index"`
-// }
+type EmbeddingRequest struct {
+ Input []string `json:"input"`
+}
-// type EmbeddingResponse struct {
-// Id string `json:"id"`
-// Object string `json:"object"`
-// Created int64 `json:"created"`
-// Data []EmbeddingData `json:"data"`
-// Usage openai.Usage `json:"usage"`
-// Error
-// }
+type EmbeddingData struct {
+ Object string `json:"object"`
+ Embedding []float64 `json:"embedding"`
+ Index int `json:"index"`
+}
-// type AccessToken struct {
-// AccessToken string `json:"access_token"`
-// Error string `json:"error,omitempty"`
-// ErrorDescription string `json:"error_description,omitempty"`
-// ExpiresIn int64 `json:"expires_in,omitempty"`
-// ExpiresAt time.Time `json:"-"`
-// }
+type EmbeddingResponse struct {
+ Id string `json:"id"`
+ Object string `json:"object"`
+ Created int64 `json:"created"`
+ Data []EmbeddingData `json:"data"`
+ Usage model.Usage `json:"usage"`
+ Error
+}
+
+type AccessToken struct {
+ AccessToken string `json:"access_token"`
+ Error string `json:"error,omitempty"`
+ ErrorDescription string `json:"error_description,omitempty"`
+ ExpiresIn int64 `json:"expires_in,omitempty"`
+ ExpiresAt time.Time `json:"-"`
+}
diff --git a/relay/adaptor/common.go b/relay/adaptor/common.go
index 13f57132..bc2e140a 100644
--- a/relay/adaptor/common.go
+++ b/relay/adaptor/common.go
@@ -5,9 +5,9 @@ import (
"net/http"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/relay/client"
+ "github.com/Laisky/one-api/relay/meta"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/client"
- "github.com/songquanpeng/one-api/relay/meta"
)
func SetupCommonRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) {
diff --git a/relay/adaptor/gemini/adaptor.go b/relay/adaptor/gemini/adaptor.go
index 1324740a..2346706c 100644
--- a/relay/adaptor/gemini/adaptor.go
+++ b/relay/adaptor/gemini/adaptor.go
@@ -6,13 +6,13 @@ import (
"net/http"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ channelhelper "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- channelhelper "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
)
type Adaptor struct {
diff --git a/relay/adaptor/gemini/main.go b/relay/adaptor/gemini/main.go
index 27a9c023..8d1f297c 100644
--- a/relay/adaptor/gemini/main.go
+++ b/relay/adaptor/gemini/main.go
@@ -8,15 +8,15 @@ import (
"net/http"
"strings"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/image"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/constant"
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/image"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
)
diff --git a/relay/adaptor/interface.go b/relay/adaptor/interface.go
index 01b2e2cb..6ff4301c 100644
--- a/relay/adaptor/interface.go
+++ b/relay/adaptor/interface.go
@@ -1,9 +1,9 @@
package adaptor
import (
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
diff --git a/relay/adaptor/minimax/main.go b/relay/adaptor/minimax/main.go
index fc9b5d26..89f070b4 100644
--- a/relay/adaptor/minimax/main.go
+++ b/relay/adaptor/minimax/main.go
@@ -2,8 +2,8 @@ package minimax
import (
"fmt"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/relaymode"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/relaymode"
)
func GetRequestURL(meta *meta.Meta) (string, error) {
diff --git a/relay/adaptor/ollama/adaptor.go b/relay/adaptor/ollama/adaptor.go
index ec1b0c40..87848feb 100644
--- a/relay/adaptor/ollama/adaptor.go
+++ b/relay/adaptor/ollama/adaptor.go
@@ -6,11 +6,11 @@ import (
"net/http"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
)
type Adaptor struct {
diff --git a/relay/adaptor/ollama/main.go b/relay/adaptor/ollama/main.go
index a7e4c058..419767e7 100644
--- a/relay/adaptor/ollama/main.go
+++ b/relay/adaptor/ollama/main.go
@@ -5,18 +5,18 @@ import (
"context"
"encoding/json"
"fmt"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/random"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/random"
"io"
"net/http"
"strings"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/constant"
- "github.com/songquanpeng/one-api/relay/model"
)
func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
diff --git a/relay/adaptor/openai/adaptor.go b/relay/adaptor/openai/adaptor.go
index 24cf718f..4d4e96c9 100644
--- a/relay/adaptor/openai/adaptor.go
+++ b/relay/adaptor/openai/adaptor.go
@@ -3,13 +3,13 @@ package openai
import (
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/minimax"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/adaptor/minimax"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
"io"
"net/http"
"strings"
@@ -39,7 +39,7 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
task := strings.TrimPrefix(requestURL, "/v1/")
model_ := meta.ActualModelName
model_ = strings.Replace(model_, ".", "", -1)
- //https://github.com/songquanpeng/one-api/issues/1191
+ //https://github.com/Laisky/one-api/issues/1191
// {your endpoint}/openai/deployments/{your azure_model}/chat/completions?api-version={api_version}
requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
return GetFullRequestURL(meta.BaseURL, requestURL, meta.ChannelType), nil
@@ -58,7 +58,7 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
}
req.Header.Set("Authorization", "Bearer "+meta.APIKey)
if meta.ChannelType == channeltype.OpenRouter {
- req.Header.Set("HTTP-Referer", "https://github.com/songquanpeng/one-api")
+ req.Header.Set("HTTP-Referer", "https://github.com/Laisky/one-api")
req.Header.Set("X-Title", "One API")
}
return nil
diff --git a/relay/adaptor/openai/compatible.go b/relay/adaptor/openai/compatible.go
index 200eac44..330e1b21 100644
--- a/relay/adaptor/openai/compatible.go
+++ b/relay/adaptor/openai/compatible.go
@@ -1,15 +1,15 @@
package openai
import (
- "github.com/songquanpeng/one-api/relay/adaptor/ai360"
- "github.com/songquanpeng/one-api/relay/adaptor/baichuan"
- "github.com/songquanpeng/one-api/relay/adaptor/groq"
- "github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
- "github.com/songquanpeng/one-api/relay/adaptor/minimax"
- "github.com/songquanpeng/one-api/relay/adaptor/mistral"
- "github.com/songquanpeng/one-api/relay/adaptor/moonshot"
- "github.com/songquanpeng/one-api/relay/adaptor/stepfun"
- "github.com/songquanpeng/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/adaptor/ai360"
+ "github.com/Laisky/one-api/relay/adaptor/baichuan"
+ "github.com/Laisky/one-api/relay/adaptor/groq"
+ "github.com/Laisky/one-api/relay/adaptor/lingyiwanwu"
+ "github.com/Laisky/one-api/relay/adaptor/minimax"
+ "github.com/Laisky/one-api/relay/adaptor/mistral"
+ "github.com/Laisky/one-api/relay/adaptor/moonshot"
+ "github.com/Laisky/one-api/relay/adaptor/stepfun"
+ "github.com/Laisky/one-api/relay/channeltype"
)
var CompatibleChannels = []int{
diff --git a/relay/adaptor/openai/helper.go b/relay/adaptor/openai/helper.go
index 7d73303b..854792b2 100644
--- a/relay/adaptor/openai/helper.go
+++ b/relay/adaptor/openai/helper.go
@@ -2,8 +2,8 @@ package openai
import (
"fmt"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/model"
"strings"
)
diff --git a/relay/adaptor/openai/image.go b/relay/adaptor/openai/image.go
index 0f89618a..9ea37f02 100644
--- a/relay/adaptor/openai/image.go
+++ b/relay/adaptor/openai/image.go
@@ -3,8 +3,8 @@ package openai
import (
"bytes"
"encoding/json"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
diff --git a/relay/adaptor/openai/main.go b/relay/adaptor/openai/main.go
index 68d8f48f..adec386d 100644
--- a/relay/adaptor/openai/main.go
+++ b/relay/adaptor/openai/main.go
@@ -4,15 +4,16 @@ import (
"bufio"
"bytes"
"encoding/json"
- "github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/conv"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
"io"
"net/http"
"strings"
+
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/conv"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
+ "github.com/gin-gonic/gin"
)
func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
diff --git a/relay/adaptor/openai/model.go b/relay/adaptor/openai/model.go
index ce252ff6..e4f05957 100644
--- a/relay/adaptor/openai/model.go
+++ b/relay/adaptor/openai/model.go
@@ -1,6 +1,6 @@
package openai
-import "github.com/songquanpeng/one-api/relay/model"
+import "github.com/Laisky/one-api/relay/model"
type TextContent struct {
Type string `json:"type,omitempty"`
diff --git a/relay/adaptor/openai/token.go b/relay/adaptor/openai/token.go
index 1e61d255..510c2118 100644
--- a/relay/adaptor/openai/token.go
+++ b/relay/adaptor/openai/token.go
@@ -3,12 +3,12 @@ package openai
import (
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/image"
+ "github.com/Laisky/one-api/common/logger"
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/relay/model"
"github.com/pkoukk/tiktoken-go"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/image"
- "github.com/songquanpeng/one-api/common/logger"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
- "github.com/songquanpeng/one-api/relay/model"
"math"
"strings"
)
diff --git a/relay/adaptor/openai/util.go b/relay/adaptor/openai/util.go
index ba0cab7d..86d1d31f 100644
--- a/relay/adaptor/openai/util.go
+++ b/relay/adaptor/openai/util.go
@@ -1,6 +1,6 @@
package openai
-import "github.com/songquanpeng/one-api/relay/model"
+import "github.com/Laisky/one-api/relay/model"
func ErrorWrapper(err error, code string, statusCode int) *model.ErrorWithStatusCode {
Error := model.Error{
diff --git a/relay/adaptor/palm/adaptor.go b/relay/adaptor/palm/adaptor.go
index fa73dd30..7dd520d8 100644
--- a/relay/adaptor/palm/adaptor.go
+++ b/relay/adaptor/palm/adaptor.go
@@ -3,11 +3,11 @@ package palm
import (
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/relay/adaptor"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
diff --git a/relay/adaptor/palm/model.go b/relay/adaptor/palm/model.go
index f653022c..5908a726 100644
--- a/relay/adaptor/palm/model.go
+++ b/relay/adaptor/palm/model.go
@@ -1,7 +1,7 @@
package palm
import (
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/model"
)
type ChatMessage struct {
diff --git a/relay/adaptor/palm/palm.go b/relay/adaptor/palm/palm.go
index 1e60e7cd..c2b20a1e 100644
--- a/relay/adaptor/palm/palm.go
+++ b/relay/adaptor/palm/palm.go
@@ -3,14 +3,14 @@ package palm
import (
"encoding/json"
"fmt"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/helper"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/common/random"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/constant"
- "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
diff --git a/relay/adaptor/tencent/adaptor.go b/relay/adaptor/tencent/adaptor.go
index a9d62e2f..258ada72 100644
--- a/relay/adaptor/tencent/adaptor.go
+++ b/relay/adaptor/tencent/adaptor.go
@@ -1,76 +1,84 @@
package tencent
-// import (
-// "github.com/Laisky/errors/v2"
-// "fmt"
-// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/relay/channel"
-// "github.com/songquanpeng/one-api/relay/channel/openai"
-// "github.com/songquanpeng/one-api/relay/model"
-// "github.com/songquanpeng/one-api/relay/util"
-// "io"
-// "net/http"
-// "strings"
-// )
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
-// // https://cloud.tencent.com/document/api/1729/101837
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+)
-// type Adaptor struct {
-// Sign string
-// }
+// https://cloud.tencent.com/document/api/1729/101837
-// func (a *Adaptor) Init(meta *util.RelayMeta) {
+type Adaptor struct {
+ Sign string
+}
-// }
+func (a *Adaptor) Init(meta *meta.Meta) {
-// func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
-// return fmt.Sprintf("%s/hyllm/v1/chat/completions", meta.BaseURL), nil
-// }
+}
-// func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *util.RelayMeta) error {
-// channel.SetupCommonRequestHeader(c, req, meta)
-// req.Header.Set("Authorization", a.Sign)
-// req.Header.Set("X-TC-Action", meta.ActualModelName)
-// return nil
-// }
+func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
+ return fmt.Sprintf("%s/hyllm/v1/chat/completions", meta.BaseURL), nil
+}
-// func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
-// apiKey := c.Request.Header.Get("Authorization")
-// apiKey = strings.TrimPrefix(apiKey, "Bearer ")
-// appId, secretId, secretKey, err := ParseConfig(apiKey)
-// if err != nil {
-// return nil, err
-// }
-// tencentRequest := ConvertRequest(*request)
-// tencentRequest.AppId = appId
-// tencentRequest.SecretId = secretId
-// // we have to calculate the sign here
-// a.Sign = GetSign(*tencentRequest, secretKey)
-// return tencentRequest, nil
-// }
+func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+ adaptor.SetupCommonRequestHeader(c, req, meta)
+ req.Header.Set("Authorization", a.Sign)
+ req.Header.Set("X-TC-Action", meta.ActualModelName)
+ return nil
+}
-// func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
-// return channel.DoRequestHelper(a, c, meta, requestBody)
-// }
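+// ConvertRequest parses the channel key ("appId|secretId|secretKey"), converts the request
+// to Tencent's format, and pre-computes the request signature used in SetupRequestHeader.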
+func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ apiKey := c.Request.Header.Get("Authorization")
+ apiKey = strings.TrimPrefix(apiKey, "Bearer ")
+ appId, secretId, secretKey, err := ParseConfig(apiKey)
+ if err != nil {
+ return nil, err
+ }
+ tencentRequest := ConvertRequest(*request)
+ tencentRequest.AppId = appId
+ tencentRequest.SecretId = secretId
+ // we have to calculate the sign here
+ a.Sign = GetSign(*tencentRequest, secretKey)
+ return tencentRequest, nil
+}
-// func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.RelayMeta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-// if meta.IsStream {
-// var responseText string
-// err, responseText = StreamHandler(c, resp)
-// usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
-// } else {
-// err, usage = Handler(c, resp)
-// }
-// return
-// }
+func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ return request, nil
+}
-// func (a *Adaptor) GetModelList() []string {
-// return ModelList
-// }
+func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+ return adaptor.DoRequestHelper(a, c, meta, requestBody)
+}
-// func (a *Adaptor) GetChannelName() string {
-// return "tencent"
-// }
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+ if meta.IsStream {
+ var responseText string
+ err, responseText = StreamHandler(c, resp)
+ usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
+ } else {
+ err, usage = Handler(c, resp)
+ }
+ return
+}
+
+func (a *Adaptor) GetModelList() []string {
+ return ModelList
+}
+
+func (a *Adaptor) GetChannelName() string {
+ return "tencent"
+}
diff --git a/relay/adaptor/tencent/main.go b/relay/adaptor/tencent/main.go
index aa87e9ce..e2a026ff 100644
--- a/relay/adaptor/tencent/main.go
+++ b/relay/adaptor/tencent/main.go
@@ -1,238 +1,231 @@
package tencent
-// import (
-// "bufio"
-// "crypto/hmac"
-// "crypto/sha1"
-// "encoding/base64"
-// "encoding/json"
-// "github.com/Laisky/errors/v2"
-// "fmt"
-// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/common"
-// "github.com/songquanpeng/one-api/common/helper"
-// "github.com/songquanpeng/one-api/common/logger"
-// "github.com/songquanpeng/one-api/relay/channel/openai"
-// "github.com/songquanpeng/one-api/relay/constant"
-// "github.com/songquanpeng/one-api/relay/model"
-// "io"
-// "net/http"
-// "sort"
-// "strconv"
-// "strings"
-// )
+import (
+ "bufio"
+ "crypto/hmac"
+ "crypto/sha1"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "sort"
+ "strconv"
+ "strings"
-// // https://cloud.tencent.com/document/product/1729/97732
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/conv"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+)
-// func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
-// messages := make([]Message, 0, len(request.Messages))
-// for i := 0; i < len(request.Messages); i++ {
-// message := request.Messages[i]
-// if message.Role == "system" {
-// messages = append(messages, Message{
-// Role: "user",
-// Content: message.StringContent(),
-// })
-// messages = append(messages, Message{
-// Role: "assistant",
-// Content: "Okay",
-// })
-// continue
-// }
-// messages = append(messages, Message{
-// Content: message.StringContent(),
-// Role: message.Role,
-// })
-// }
-// stream := 0
-// if request.Stream {
-// stream = 1
-// }
-// return &ChatRequest{
-// Timestamp: helper.GetTimestamp(),
-// Expired: helper.GetTimestamp() + 24*60*60,
-// QueryID: helper.GetUUID(),
-// Temperature: request.Temperature,
-// TopP: request.TopP,
-// Stream: stream,
-// Messages: messages,
-// }
-// }
+// https://cloud.tencent.com/document/product/1729/97732
-// func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
-// fullTextResponse := openai.TextResponse{
-// Object: "chat.completion",
-// Created: helper.GetTimestamp(),
-// Usage: response.Usage,
-// }
-// if len(response.Choices) > 0 {
-// choice := openai.TextResponseChoice{
-// Index: 0,
-// Message: model.Message{
-// Role: "assistant",
-// Content: response.Choices[0].Messages.Content,
-// },
-// FinishReason: response.Choices[0].FinishReason,
-// }
-// fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
-// }
-// return &fullTextResponse
-// }
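+// ConvertRequest converts an OpenAI-style chat request into Tencent Hunyuan's chat format.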
+func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
+ messages := make([]Message, 0, len(request.Messages))
+ for i := 0; i < len(request.Messages); i++ {
+ message := request.Messages[i]
+ messages = append(messages, Message{
+ Content: message.StringContent(),
+ Role: message.Role,
+ })
+ }
+ stream := 0
+ if request.Stream {
+ stream = 1
+ }
+ return &ChatRequest{
+ Timestamp: helper.GetTimestamp(),
+ Expired: helper.GetTimestamp() + 24*60*60,
+ QueryID: random.GetUUID(),
+ Temperature: request.Temperature,
+ TopP: request.TopP,
+ Stream: stream,
+ Messages: messages,
+ }
+}
-// func streamResponseTencent2OpenAI(TencentResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
-// response := openai.ChatCompletionsStreamResponse{
-// Object: "chat.completion.chunk",
-// Created: helper.GetTimestamp(),
-// Model: "tencent-hunyuan",
-// }
-// if len(TencentResponse.Choices) > 0 {
-// var choice openai.ChatCompletionsStreamResponseChoice
-// choice.Delta.Content = TencentResponse.Choices[0].Delta.Content
-// if TencentResponse.Choices[0].FinishReason == "stop" {
-// choice.FinishReason = &constant.StopFinishReason
-// }
-// response.Choices = append(response.Choices, choice)
-// }
-// return &response
-// }
+func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
+ fullTextResponse := openai.TextResponse{
+ Object: "chat.completion",
+ Created: helper.GetTimestamp(),
+ Usage: response.Usage,
+ }
+ if len(response.Choices) > 0 {
+ choice := openai.TextResponseChoice{
+ Index: 0,
+ Message: model.Message{
+ Role: "assistant",
+ Content: response.Choices[0].Messages.Content,
+ },
+ FinishReason: response.Choices[0].FinishReason,
+ }
+ fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
+ }
+ return &fullTextResponse
+}
-// func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
-// var responseText string
-// scanner := bufio.NewScanner(resp.Body)
-// scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-// if atEOF && len(data) == 0 {
-// return 0, nil, nil
-// }
-// if i := strings.Index(string(data), "\n"); i >= 0 {
-// return i + 1, data[0:i], nil
-// }
-// if atEOF {
-// return len(data), data, nil
-// }
-// return 0, nil, nil
-// })
-// dataChan := make(chan string)
-// stopChan := make(chan bool)
-// go func() {
-// for scanner.Scan() {
-// data := scanner.Text()
-// if len(data) < 5 { // ignore blank line or wrong format
-// continue
-// }
-// if data[:5] != "data:" {
-// continue
-// }
-// data = data[5:]
-// dataChan <- data
-// }
-// stopChan <- true
-// }()
-// common.SetEventStreamHeaders(c)
-// c.Stream(func(w io.Writer) bool {
-// select {
-// case data := <-dataChan:
-// var TencentResponse ChatResponse
-// err := json.Unmarshal([]byte(data), &TencentResponse)
-// if err != nil {
-// logger.SysError("error unmarshalling stream response: " + err.Error())
-// return true
-// }
-// response := streamResponseTencent2OpenAI(&TencentResponse)
-// if len(response.Choices) != 0 {
-// responseText += response.Choices[0].Delta.Content
-// }
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// logger.SysError("error marshalling stream response: " + err.Error())
-// return true
-// }
-// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-// return true
-// case <-stopChan:
-// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-// return false
-// }
-// })
-// err := resp.Body.Close()
-// if err != nil {
-// return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
-// }
-// return nil, responseText
-// }
+func streamResponseTencent2OpenAI(TencentResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+ response := openai.ChatCompletionsStreamResponse{
+ Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
+ Object: "chat.completion.chunk",
+ Created: helper.GetTimestamp(),
+ Model: "tencent-hunyuan",
+ }
+ if len(TencentResponse.Choices) > 0 {
+ var choice openai.ChatCompletionsStreamResponseChoice
+ choice.Delta.Content = TencentResponse.Choices[0].Delta.Content
+ if TencentResponse.Choices[0].FinishReason == "stop" {
+ choice.FinishReason = &constant.StopFinishReason
+ }
+ response.Choices = append(response.Choices, choice)
+ }
+ return &response
+}
-// func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
-// var TencentResponse ChatResponse
-// responseBody, err := io.ReadAll(resp.Body)
-// if err != nil {
-// return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = resp.Body.Close()
-// if err != nil {
-// return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = json.Unmarshal(responseBody, &TencentResponse)
-// if err != nil {
-// return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// if TencentResponse.Error.Code != 0 {
-// return &model.ErrorWithStatusCode{
-// Error: model.Error{
-// Message: TencentResponse.Error.Message,
-// Code: TencentResponse.Error.Code,
-// },
-// StatusCode: resp.StatusCode,
-// }, nil
-// }
-// fullTextResponse := responseTencent2OpenAI(&TencentResponse)
-// fullTextResponse.Model = "hunyuan"
-// jsonResponse, err := json.Marshal(fullTextResponse)
-// if err != nil {
-// return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// c.Writer.WriteHeader(resp.StatusCode)
-// _, err = c.Writer.Write(jsonResponse)
-// if err != nil {
-// return openai.ErrorWrapper(err, "write_response_body_failed", http.StatusInternalServerError), nil
-// }
-// return nil, &fullTextResponse.Usage
-// }
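+// StreamHandler relays Tencent's SSE chat stream, converting each chunk into an
+// OpenAI-compatible stream response and collecting the full response text.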
+func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
+ var responseText string
+ scanner := bufio.NewScanner(resp.Body)
+ scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := strings.Index(string(data), "\n"); i >= 0 {
+ return i + 1, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ })
+ dataChan := make(chan string)
+ stopChan := make(chan bool)
+ go func() {
+ for scanner.Scan() {
+ data := scanner.Text()
+ if len(data) < 5 { // ignore blank line or wrong format
+ continue
+ }
+ if data[:5] != "data:" {
+ continue
+ }
+ data = data[5:]
+ dataChan <- data
+ }
+ stopChan <- true
+ }()
+ common.SetEventStreamHeaders(c)
+ c.Stream(func(w io.Writer) bool {
+ select {
+ case data := <-dataChan:
+ var TencentResponse ChatResponse
+ err := json.Unmarshal([]byte(data), &TencentResponse)
+ if err != nil {
+ logger.SysError("error unmarshalling stream response: " + err.Error())
+ return true
+ }
+ response := streamResponseTencent2OpenAI(&TencentResponse)
+ if len(response.Choices) != 0 {
+ responseText += conv.AsString(response.Choices[0].Delta.Content)
+ }
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ logger.SysError("error marshalling stream response: " + err.Error())
+ return true
+ }
+ c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
+ return true
+ case <-stopChan:
+ c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
+ return false
+ }
+ })
+ err := resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
+ }
+ return nil, responseText
+}
-// func ParseConfig(config string) (appId int64, secretId string, secretKey string, err error) {
-// parts := strings.Split(config, "|")
-// if len(parts) != 3 {
-// err = errors.New("invalid tencent config")
-// return
-// }
-// appId, err = strconv.ParseInt(parts[0], 10, 64)
-// secretId = parts[1]
-// secretKey = parts[2]
-// return
-// }
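+// Handler converts a non-streaming Tencent chat completion response into the OpenAI format
+// and writes it back to the client.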
+func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var TencentResponse ChatResponse
+ responseBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = json.Unmarshal(responseBody, &TencentResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ if TencentResponse.Error.Code != 0 {
+ return &model.ErrorWithStatusCode{
+ Error: model.Error{
+ Message: TencentResponse.Error.Message,
+ Code: TencentResponse.Error.Code,
+ },
+ StatusCode: resp.StatusCode,
+ }, nil
+ }
+ fullTextResponse := responseTencent2OpenAI(&TencentResponse)
+ fullTextResponse.Model = "hunyuan"
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+ _, err = c.Writer.Write(jsonResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "write_response_body_failed", http.StatusInternalServerError), nil
+ }
+ return nil, &fullTextResponse.Usage
+}
-// func GetSign(req ChatRequest, secretKey string) string {
-// params := make([]string, 0)
-// params = append(params, "app_id="+strconv.FormatInt(req.AppId, 10))
-// params = append(params, "secret_id="+req.SecretId)
-// params = append(params, "timestamp="+strconv.FormatInt(req.Timestamp, 10))
-// params = append(params, "query_id="+req.QueryID)
-// params = append(params, "temperature="+strconv.FormatFloat(req.Temperature, 'f', -1, 64))
-// params = append(params, "top_p="+strconv.FormatFloat(req.TopP, 'f', -1, 64))
-// params = append(params, "stream="+strconv.Itoa(req.Stream))
-// params = append(params, "expired="+strconv.FormatInt(req.Expired, 10))
+func ParseConfig(config string) (appId int64, secretId string, secretKey string, err error) {
+ parts := strings.Split(config, "|")
+ if len(parts) != 3 {
+ err = errors.New("invalid tencent config")
+ return
+ }
+ appId, err = strconv.ParseInt(parts[0], 10, 64)
+ secretId = parts[1]
+ secretKey = parts[2]
+ return
+}
-// var messageStr string
-// for _, msg := range req.Messages {
-// messageStr += fmt.Sprintf(`{"role":"%s","content":"%s"},`, msg.Role, msg.Content)
-// }
-// messageStr = strings.TrimSuffix(messageStr, ",")
-// params = append(params, "messages=["+messageStr+"]")
+func GetSign(req ChatRequest, secretKey string) string {
+ params := make([]string, 0)
+ params = append(params, "app_id="+strconv.FormatInt(req.AppId, 10))
+ params = append(params, "secret_id="+req.SecretId)
+ params = append(params, "timestamp="+strconv.FormatInt(req.Timestamp, 10))
+ params = append(params, "query_id="+req.QueryID)
+ params = append(params, "temperature="+strconv.FormatFloat(req.Temperature, 'f', -1, 64))
+ params = append(params, "top_p="+strconv.FormatFloat(req.TopP, 'f', -1, 64))
+ params = append(params, "stream="+strconv.Itoa(req.Stream))
+ params = append(params, "expired="+strconv.FormatInt(req.Expired, 10))
-// sort.Strings(params)
-// url := "hunyuan.cloud.tencent.com/hyllm/v1/chat/completions?" + strings.Join(params, "&")
-// mac := hmac.New(sha1.New, []byte(secretKey))
-// signURL := url
-// mac.Write([]byte(signURL))
-// sign := mac.Sum([]byte(nil))
-// return base64.StdEncoding.EncodeToString(sign)
-// }
+ var messageStr string
+ for _, msg := range req.Messages {
+ messageStr += fmt.Sprintf(`{"role":"%s","content":"%s"},`, msg.Role, msg.Content)
+ }
+ messageStr = strings.TrimSuffix(messageStr, ",")
+ params = append(params, "messages=["+messageStr+"]")
+
+ sort.Strings(params)
+ url := "hunyuan.cloud.tencent.com/hyllm/v1/chat/completions?" + strings.Join(params, "&")
+ mac := hmac.New(sha1.New, []byte(secretKey))
+ signURL := url
+ mac.Write([]byte(signURL))
+ sign := mac.Sum([]byte(nil))
+ return base64.StdEncoding.EncodeToString(sign)
+}
diff --git a/relay/adaptor/tencent/model.go b/relay/adaptor/tencent/model.go
index 943942b6..67f2ee1c 100644
--- a/relay/adaptor/tencent/model.go
+++ b/relay/adaptor/tencent/model.go
@@ -1,63 +1,63 @@
package tencent
-// import (
-// "github.com/songquanpeng/one-api/relay/model"
-// )
+import (
+ "github.com/Laisky/one-api/relay/model"
+)
-// type Message struct {
-// Role string `json:"role"`
-// Content string `json:"content"`
-// }
+type Message struct {
+ Role string `json:"role"`
+ Content string `json:"content"`
+}
-// type ChatRequest struct {
-// AppId int64 `json:"app_id"` // 腾讯云账号的 APPID
-// SecretId string `json:"secret_id"` // 官网 SecretId
-// // Timestamp当前 UNIX 时间戳,单位为秒,可记录发起 API 请求的时间。
-// // 例如1529223702,如果与当前时间相差过大,会引起签名过期错误
-// Timestamp int64 `json:"timestamp"`
-// // Expired 签名的有效期,是一个符合 UNIX Epoch 时间戳规范的数值,
-// // 单位为秒;Expired 必须大于 Timestamp 且 Expired-Timestamp 小于90天
-// Expired int64 `json:"expired"`
-// QueryID string `json:"query_id"` //请求 Id,用于问题排查
-// // Temperature 较高的数值会使输出更加随机,而较低的数值会使其更加集中和确定
-// // 默认 1.0,取值区间为[0.0,2.0],非必要不建议使用,不合理的取值会影响效果
-// // 建议该参数和 top_p 只设置1个,不要同时更改 top_p
-// Temperature float64 `json:"temperature"`
-// // TopP 影响输出文本的多样性,取值越大,生成文本的多样性越强
-// // 默认1.0,取值区间为[0.0, 1.0],非必要不建议使用, 不合理的取值会影响效果
-// // 建议该参数和 temperature 只设置1个,不要同时更改
-// TopP float64 `json:"top_p"`
-// // Stream 0:同步,1:流式 (默认,协议:SSE)
-// // 同步请求超时:60s,如果内容较长建议使用流式
-// Stream int `json:"stream"`
-// // Messages 会话内容, 长度最多为40, 按对话时间从旧到新在数组中排列
-// // 输入 content 总数最大支持 3000 token。
-// Messages []Message `json:"messages"`
-// }
+type ChatRequest struct {
+	AppId    int64  `json:"app_id"`    // APPID of the Tencent Cloud account
+	SecretId string `json:"secret_id"` // SecretId from the Tencent Cloud console
+	// Timestamp is the current UNIX timestamp in seconds, recording when the API request was made,
+	// e.g. 1529223702; if it deviates too much from the current time, a signature-expired error is returned
+	Timestamp int64 `json:"timestamp"`
+	// Expired is the signature expiration time, a UNIX Epoch timestamp in seconds;
+	// Expired must be greater than Timestamp and Expired-Timestamp must be less than 90 days
+	Expired int64  `json:"expired"`
+	QueryID string `json:"query_id"` // request Id, used for troubleshooting
+	// Temperature: higher values make the output more random, lower values make it more focused and deterministic.
+	// Defaults to 1.0, range [0.0, 2.0]; not recommended unless necessary, as unreasonable values degrade results.
+	// Set either this or top_p, not both
+	Temperature float64 `json:"temperature"`
+	// TopP controls the diversity of the output text; larger values produce more diverse text.
+	// Defaults to 1.0, range [0.0, 1.0]; not recommended unless necessary, as unreasonable values degrade results.
+	// Set either this or temperature, not both
+	TopP float64 `json:"top_p"`
+	// Stream: 0 = synchronous, 1 = streaming (default, protocol: SSE).
+	// Synchronous requests time out after 60s; streaming is recommended for long content
+	Stream int `json:"stream"`
+	// Messages holds the conversation, at most 40 entries, ordered from oldest to newest.
+	// The total input content supports at most 3000 tokens.
+	Messages []Message `json:"messages"`
+}
-// type Error struct {
-// Code int `json:"code"`
-// Message string `json:"message"`
-// }
+type Error struct {
+ Code int `json:"code"`
+ Message string `json:"message"`
+}
-// type Usage struct {
-// InputTokens int `json:"input_tokens"`
-// OutputTokens int `json:"output_tokens"`
-// TotalTokens int `json:"total_tokens"`
-// }
+type Usage struct {
+ InputTokens int `json:"input_tokens"`
+ OutputTokens int `json:"output_tokens"`
+ TotalTokens int `json:"total_tokens"`
+}
-// type ResponseChoices struct {
-// FinishReason string `json:"finish_reason,omitempty"` // 流式结束标志位,为 stop 则表示尾包
-// Messages Message `json:"messages,omitempty"` // 内容,同步模式返回内容,流模式为 null 输出 content 内容总数最多支持 1024token。
-// Delta Message `json:"delta,omitempty"` // 内容,流模式返回内容,同步模式为 null 输出 content 内容总数最多支持 1024token。
-// }
+type ResponseChoices struct {
+	FinishReason string  `json:"finish_reason,omitempty"` // stream finish flag; "stop" marks the final packet
+	Messages     Message `json:"messages,omitempty"`      // content returned in synchronous mode, null in streaming mode; output content is limited to 1024 tokens
+	Delta        Message `json:"delta,omitempty"`         // content returned in streaming mode, null in synchronous mode; output content is limited to 1024 tokens
+}
-// type ChatResponse struct {
-// Choices []ResponseChoices `json:"choices,omitempty"` // 结果
-// Created string `json:"created,omitempty"` // unix 时间戳的字符串
-// Id string `json:"id,omitempty"` // 会话 id
-// Usage model.Usage `json:"usage,omitempty"` // token 数量
-// Error Error `json:"error,omitempty"` // 错误信息 注意:此字段可能返回 null,表示取不到有效值
-// Note string `json:"note,omitempty"` // 注释
-// ReqID string `json:"req_id,omitempty"` // 唯一请求 Id,每次请求都会返回。用于反馈接口入参
-// }
+type ChatResponse struct {
+	Choices []ResponseChoices `json:"choices,omitempty"` // results
+	Created string            `json:"created,omitempty"` // unix timestamp as a string
+	Id      string            `json:"id,omitempty"`      // conversation id
+	Usage   model.Usage       `json:"usage,omitempty"`   // token usage
+	Error   Error             `json:"error,omitempty"`   // error info; note: this field may be null, meaning no valid value was returned
+	Note    string            `json:"note,omitempty"`    // note
+	ReqID   string            `json:"req_id,omitempty"`  // unique request id returned with every request, used when reporting issues
+}
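
Reviewer note: a small sketch of the wire format implied by the restored structs, trimmed to a few fields and using hypothetical credentials, just to show how a request body marshals.

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Trimmed, local copy of the restored tencent.ChatRequest for illustration.
type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type chatRequest struct {
	AppId     int64     `json:"app_id"`
	SecretId  string    `json:"secret_id"`
	Timestamp int64     `json:"timestamp"`
	Expired   int64     `json:"expired"`
	Stream    int       `json:"stream"`
	Messages  []message `json:"messages"`
}

func main() {
	now := time.Now().Unix()
	req := chatRequest{
		AppId:     1234567,             // hypothetical APPID
		SecretId:  "example-secret-id", // hypothetical SecretId
		Timestamp: now,
		Expired:   now + 24*3600, // must stay within 90 days of Timestamp
		Stream:    0,
		Messages:  []message{{Role: "user", Content: "hello"}},
	}
	b, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(b))
}
```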
diff --git a/relay/adaptor/xunfei/adaptor.go b/relay/adaptor/xunfei/adaptor.go
index 9b112f7e..f67cc293 100644
--- a/relay/adaptor/xunfei/adaptor.go
+++ b/relay/adaptor/xunfei/adaptor.go
@@ -1,70 +1,78 @@
package xunfei
-// import (
-// "github.com/Laisky/errors/v2"
-// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/relay/channel"
-// "github.com/songquanpeng/one-api/relay/channel/openai"
-// "github.com/songquanpeng/one-api/relay/model"
-// "github.com/songquanpeng/one-api/relay/util"
-// "io"
-// "net/http"
-// "strings"
-// )
+import (
+ "errors"
+ "io"
+ "net/http"
+ "strings"
-// type Adaptor struct {
-// request *model.GeneralOpenAIRequest
-// }
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+)
-// func (a *Adaptor) Init(meta *util.RelayMeta) {
+type Adaptor struct {
+ request *model.GeneralOpenAIRequest
+}
-// }
+func (a *Adaptor) Init(meta *meta.Meta) {
-// func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
-// return "", nil
-// }
+}
-// func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *util.RelayMeta) error {
-// channel.SetupCommonRequestHeader(c, req, meta)
-// // check DoResponse for auth part
-// return nil
-// }
+func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
+ return "", nil
+}
-// func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
-// a.request = request
-// return nil, nil
-// }
+func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+ adaptor.SetupCommonRequestHeader(c, req, meta)
+ // check DoResponse for auth part
+ return nil
+}
-// func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io.Reader) (*http.Response, error) {
-// // xunfei's request is not http request, so we don't need to do anything here
-// dummyResp := &http.Response{}
-// dummyResp.StatusCode = http.StatusOK
-// return dummyResp, nil
-// }
+func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ a.request = request
+ return nil, nil
+}
-// func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.RelayMeta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-// splits := strings.Split(meta.APIKey, "|")
-// if len(splits) != 3 {
-// return nil, openai.ErrorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest)
-// }
-// if a.request == nil {
-// return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
-// }
-// if meta.IsStream {
-// err, usage = StreamHandler(c, *a.request, splits[0], splits[1], splits[2])
-// } else {
-// err, usage = Handler(c, *a.request, splits[0], splits[1], splits[2])
-// }
-// return
-// }
+func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ return request, nil
+}
-// func (a *Adaptor) GetModelList() []string {
-// return ModelList
-// }
+func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+ // xunfei's request is not http request, so we don't need to do anything here
+ dummyResp := &http.Response{}
+ dummyResp.StatusCode = http.StatusOK
+ return dummyResp, nil
+}
-// func (a *Adaptor) GetChannelName() string {
-// return "xunfei"
-// }
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+ splits := strings.Split(meta.APIKey, "|")
+ if len(splits) != 3 {
+ return nil, openai.ErrorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest)
+ }
+ if a.request == nil {
+ return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
+ }
+ if meta.IsStream {
+ err, usage = StreamHandler(c, *a.request, splits[0], splits[1], splits[2])
+ } else {
+ err, usage = Handler(c, *a.request, splits[0], splits[1], splits[2])
+ }
+ return
+}
+
+func (a *Adaptor) GetModelList() []string {
+ return ModelList
+}
+
+func (a *Adaptor) GetChannelName() string {
+ return "xunfei"
+}
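
Reviewer note: DoResponse above splits the channel key on "|" into three parts before dispatching to the handlers, i.e. the key is expected to look like "appId|apiSecret|apiKey". A minimal sketch of that parsing, with hypothetical credentials:

```go
package main

import (
	"errors"
	"fmt"
	"strings"
)

// parseXunfeiKey mirrors the check in Adaptor.DoResponse: a valid channel key
// has exactly three "|"-separated segments.
func parseXunfeiKey(key string) (appId, apiSecret, apiKey string, err error) {
	splits := strings.Split(key, "|")
	if len(splits) != 3 {
		return "", "", "", errors.New("invalid auth")
	}
	return splits[0], splits[1], splits[2], nil
}

func main() {
	appId, secret, key, err := parseXunfeiKey("my-app-id|my-api-secret|my-api-key")
	if err != nil {
		panic(err)
	}
	fmt.Println(appId, secret, key)
}
```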
diff --git a/relay/adaptor/xunfei/main.go b/relay/adaptor/xunfei/main.go
index 281f260b..9a5d12cd 100644
--- a/relay/adaptor/xunfei/main.go
+++ b/relay/adaptor/xunfei/main.go
@@ -1,306 +1,313 @@
package xunfei
-// import (
-// "crypto/hmac"
-// "crypto/sha256"
-// "encoding/base64"
-// "encoding/json"
-// "fmt"
-// "github.com/gin-gonic/gin"
-// "github.com/gorilla/websocket"
-// "io"
-// "net/http"
-// "net/url"
-// "one-api/common"
-// "strings"
-// "time"
-// )
+import (
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
-// // https://console.xfyun.cn/services/cbm
-// // https://www.xfyun.cn/doc/spark/Web.html
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/common/random"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+ "github.com/gorilla/websocket"
+)
-// type XunfeiMessage struct {
-// Role string `json:"role"`
-// Content string `json:"content"`
-// }
+// https://console.xfyun.cn/services/cbm
+// https://www.xfyun.cn/doc/spark/Web.html
-// type XunfeiChatRequest struct {
-// Header struct {
-// AppId string `json:"app_id"`
-// } `json:"header"`
-// Parameter struct {
-// Chat struct {
-// Domain string `json:"domain,omitempty"`
-// Temperature float64 `json:"temperature,omitempty"`
-// TopK int `json:"top_k,omitempty"`
-// MaxTokens int `json:"max_tokens,omitempty"`
-// Auditing bool `json:"auditing,omitempty"`
-// } `json:"chat"`
-// } `json:"parameter"`
-// Payload struct {
-// Message struct {
-// Text []XunfeiMessage `json:"text"`
-// } `json:"message"`
-// } `json:"payload"`
-// }
+func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string, domain string) *ChatRequest {
+ messages := make([]Message, 0, len(request.Messages))
+ var lastToolCalls []model.Tool
+ for _, message := range request.Messages {
+ if message.ToolCalls != nil {
+ lastToolCalls = message.ToolCalls
+ }
+ messages = append(messages, Message{
+ Role: message.Role,
+ Content: message.StringContent(),
+ })
+ }
+ xunfeiRequest := ChatRequest{}
+ xunfeiRequest.Header.AppId = xunfeiAppId
+ xunfeiRequest.Parameter.Chat.Domain = domain
+ xunfeiRequest.Parameter.Chat.Temperature = request.Temperature
+ xunfeiRequest.Parameter.Chat.TopK = request.N
+ xunfeiRequest.Parameter.Chat.MaxTokens = request.MaxTokens
+ xunfeiRequest.Payload.Message.Text = messages
+ if len(lastToolCalls) != 0 {
+ for _, toolCall := range lastToolCalls {
+ xunfeiRequest.Payload.Functions.Text = append(xunfeiRequest.Payload.Functions.Text, toolCall.Function)
+ }
+ }
-// type XunfeiChatResponseTextItem struct {
-// Content string `json:"content"`
-// Role string `json:"role"`
-// Index int `json:"index"`
-// }
+ return &xunfeiRequest
+}
-// type XunfeiChatResponse struct {
-// Header struct {
-// Code int `json:"code"`
-// Message string `json:"message"`
-// Sid string `json:"sid"`
-// Status int `json:"status"`
-// } `json:"header"`
-// Payload struct {
-// Choices struct {
-// Status int `json:"status"`
-// Seq int `json:"seq"`
-// Text []XunfeiChatResponseTextItem `json:"text"`
-// } `json:"choices"`
-// Usage struct {
-// //Text struct {
-// // QuestionTokens string `json:"question_tokens"`
-// // PromptTokens string `json:"prompt_tokens"`
-// // CompletionTokens string `json:"completion_tokens"`
-// // TotalTokens string `json:"total_tokens"`
-// //} `json:"text"`
-// Text Usage `json:"text"`
-// } `json:"usage"`
-// } `json:"payload"`
-// }
+func getToolCalls(response *ChatResponse) []model.Tool {
+ var toolCalls []model.Tool
+ if len(response.Payload.Choices.Text) == 0 {
+ return toolCalls
+ }
+ item := response.Payload.Choices.Text[0]
+ if item.FunctionCall == nil {
+ return toolCalls
+ }
+ toolCall := model.Tool{
+ Id: fmt.Sprintf("call_%s", random.GetUUID()),
+ Type: "function",
+ Function: *item.FunctionCall,
+ }
+ toolCalls = append(toolCalls, toolCall)
+ return toolCalls
+}
-// func requestOpenAI2Xunfei(request GeneralOpenAIRequest, xunfeiAppId string, domain string) *XunfeiChatRequest {
-// messages := make([]XunfeiMessage, 0, len(request.Messages))
-// for _, message := range request.Messages {
-// if message.Role == "system" {
-// messages = append(messages, XunfeiMessage{
-// Role: "user",
-// Content: message.Content,
-// })
-// messages = append(messages, XunfeiMessage{
-// Role: "assistant",
-// Content: "Okay",
-// })
-// } else {
-// messages = append(messages, XunfeiMessage{
-// Role: message.Role,
-// Content: message.Content,
-// })
-// }
-// }
-// xunfeiRequest := XunfeiChatRequest{}
-// xunfeiRequest.Header.AppId = xunfeiAppId
-// xunfeiRequest.Parameter.Chat.Domain = domain
-// xunfeiRequest.Parameter.Chat.Temperature = request.Temperature
-// xunfeiRequest.Parameter.Chat.TopK = request.N
-// xunfeiRequest.Parameter.Chat.MaxTokens = request.MaxTokens
-// xunfeiRequest.Payload.Message.Text = messages
-// return &xunfeiRequest
-// }
+func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
+ if len(response.Payload.Choices.Text) == 0 {
+ response.Payload.Choices.Text = []ChatResponseTextItem{
+ {
+ Content: "",
+ },
+ }
+ }
+ choice := openai.TextResponseChoice{
+ Index: 0,
+ Message: model.Message{
+ Role: "assistant",
+ Content: response.Payload.Choices.Text[0].Content,
+ ToolCalls: getToolCalls(response),
+ },
+ FinishReason: constant.StopFinishReason,
+ }
+ fullTextResponse := openai.TextResponse{
+ Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
+ Object: "chat.completion",
+ Created: helper.GetTimestamp(),
+ Choices: []openai.TextResponseChoice{choice},
+ Usage: response.Payload.Usage.Text,
+ }
+ return &fullTextResponse
+}
-// func responseXunfei2OpenAI(response *XunfeiChatResponse) *OpenAITextResponse {
-// if len(response.Payload.Choices.Text) == 0 {
-// response.Payload.Choices.Text = []XunfeiChatResponseTextItem{
-// {
-// Content: "",
-// },
-// }
-// }
-// choice := OpenAITextResponseChoice{
-// Index: 0,
-// Message: Message{
-// Role: "assistant",
-// Content: response.Payload.Choices.Text[0].Content,
-// },
-// FinishReason: stopFinishReason,
-// }
-// fullTextResponse := OpenAITextResponse{
-// Object: "chat.completion",
-// Created: common.GetTimestamp(),
-// Choices: []OpenAITextResponseChoice{choice},
-// Usage: response.Payload.Usage.Text,
-// }
-// return &fullTextResponse
-// }
+func streamResponseXunfei2OpenAI(xunfeiResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+ if len(xunfeiResponse.Payload.Choices.Text) == 0 {
+ xunfeiResponse.Payload.Choices.Text = []ChatResponseTextItem{
+ {
+ Content: "",
+ },
+ }
+ }
+ var choice openai.ChatCompletionsStreamResponseChoice
+ choice.Delta.Content = xunfeiResponse.Payload.Choices.Text[0].Content
+ choice.Delta.ToolCalls = getToolCalls(xunfeiResponse)
+ if xunfeiResponse.Payload.Choices.Status == 2 {
+ choice.FinishReason = &constant.StopFinishReason
+ }
+ response := openai.ChatCompletionsStreamResponse{
+ Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
+ Object: "chat.completion.chunk",
+ Created: helper.GetTimestamp(),
+ Model: "SparkDesk",
+ Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
+ }
+ return &response
+}
-// func streamResponseXunfei2OpenAI(xunfeiResponse *XunfeiChatResponse) *ChatCompletionsStreamResponse {
-// if len(xunfeiResponse.Payload.Choices.Text) == 0 {
-// xunfeiResponse.Payload.Choices.Text = []XunfeiChatResponseTextItem{
-// {
-// Content: "",
-// },
-// }
-// }
-// var choice ChatCompletionsStreamResponseChoice
-// choice.Delta.Content = xunfeiResponse.Payload.Choices.Text[0].Content
-// if xunfeiResponse.Payload.Choices.Status == 2 {
-// choice.FinishReason = &stopFinishReason
-// }
-// response := ChatCompletionsStreamResponse{
-// Object: "chat.completion.chunk",
-// Created: common.GetTimestamp(),
-// Model: "SparkDesk",
-// Choices: []ChatCompletionsStreamResponseChoice{choice},
-// }
-// return &response
-// }
+func buildXunfeiAuthUrl(hostUrl string, apiKey, apiSecret string) string {
+ HmacWithShaToBase64 := func(algorithm, data, key string) string {
+ mac := hmac.New(sha256.New, []byte(key))
+ mac.Write([]byte(data))
+ encodeData := mac.Sum(nil)
+ return base64.StdEncoding.EncodeToString(encodeData)
+ }
+ ul, err := url.Parse(hostUrl)
+ if err != nil {
+ fmt.Println(err)
+ }
+ date := time.Now().UTC().Format(time.RFC1123)
+ signString := []string{"host: " + ul.Host, "date: " + date, "GET " + ul.Path + " HTTP/1.1"}
+ sign := strings.Join(signString, "\n")
+ sha := HmacWithShaToBase64("hmac-sha256", sign, apiSecret)
+ authUrl := fmt.Sprintf("hmac username=\"%s\", algorithm=\"%s\", headers=\"%s\", signature=\"%s\"", apiKey,
+ "hmac-sha256", "host date request-line", sha)
+ authorization := base64.StdEncoding.EncodeToString([]byte(authUrl))
+ v := url.Values{}
+ v.Add("host", ul.Host)
+ v.Add("date", date)
+ v.Add("authorization", authorization)
+ callUrl := hostUrl + "?" + v.Encode()
+ return callUrl
+}
-// func buildXunfeiAuthUrl(hostUrl string, apiKey, apiSecret string) string {
-// HmacWithShaToBase64 := func(algorithm, data, key string) string {
-// mac := hmac.New(sha256.New, []byte(key))
-// mac.Write([]byte(data))
-// encodeData := mac.Sum(nil)
-// return base64.StdEncoding.EncodeToString(encodeData)
-// }
-// ul, err := url.Parse(hostUrl)
-// if err != nil {
-// fmt.Println(err)
-// }
-// date := time.Now().UTC().Format(time.RFC1123)
-// signString := []string{"host: " + ul.Host, "date: " + date, "GET " + ul.Path + " HTTP/1.1"}
-// sign := strings.Join(signString, "\n")
-// sha := HmacWithShaToBase64("hmac-sha256", sign, apiSecret)
-// authUrl := fmt.Sprintf("hmac username=\"%s\", algorithm=\"%s\", headers=\"%s\", signature=\"%s\"", apiKey,
-// "hmac-sha256", "host date request-line", sha)
-// authorization := base64.StdEncoding.EncodeToString([]byte(authUrl))
-// v := url.Values{}
-// v.Add("host", ul.Host)
-// v.Add("date", date)
-// v.Add("authorization", authorization)
-// callUrl := hostUrl + "?" + v.Encode()
-// return callUrl
-// }
+func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
+ domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
+ dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
+ if err != nil {
+ return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
+ }
+ common.SetEventStreamHeaders(c)
+ var usage model.Usage
+ c.Stream(func(w io.Writer) bool {
+ select {
+ case xunfeiResponse := <-dataChan:
+ usage.PromptTokens += xunfeiResponse.Payload.Usage.Text.PromptTokens
+ usage.CompletionTokens += xunfeiResponse.Payload.Usage.Text.CompletionTokens
+ usage.TotalTokens += xunfeiResponse.Payload.Usage.Text.TotalTokens
+ response := streamResponseXunfei2OpenAI(&xunfeiResponse)
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ logger.SysError("error marshalling stream response: " + err.Error())
+ return true
+ }
+ c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
+ return true
+ case <-stopChan:
+ c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
+ return false
+ }
+ })
+ return nil, &usage
+}
-// func xunfeiStreamHandler(c *gin.Context, textRequest GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*OpenAIErrorWithStatusCode, *Usage) {
-// domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret)
-// dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
-// if err != nil {
-// return errorWrapper(err, "make xunfei request err", http.StatusInternalServerError), nil
-// }
-// setEventStreamHeaders(c)
-// var usage Usage
-// c.Stream(func(w io.Writer) bool {
-// select {
-// case xunfeiResponse := <-dataChan:
-// usage.PromptTokens += xunfeiResponse.Payload.Usage.Text.PromptTokens
-// usage.CompletionTokens += xunfeiResponse.Payload.Usage.Text.CompletionTokens
-// usage.TotalTokens += xunfeiResponse.Payload.Usage.Text.TotalTokens
-// response := streamResponseXunfei2OpenAI(&xunfeiResponse)
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// common.SysError("error marshalling stream response: " + err.Error())
-// return true
-// }
-// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-// return true
-// case <-stopChan:
-// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-// return false
-// }
-// })
-// return nil, &usage
-// }
+func Handler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
+ domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
+ dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
+ if err != nil {
+ return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
+ }
+ var usage model.Usage
+ var content string
+ var xunfeiResponse ChatResponse
+ stop := false
+ for !stop {
+ select {
+ case xunfeiResponse = <-dataChan:
+ if len(xunfeiResponse.Payload.Choices.Text) == 0 {
+ continue
+ }
+ content += xunfeiResponse.Payload.Choices.Text[0].Content
+ usage.PromptTokens += xunfeiResponse.Payload.Usage.Text.PromptTokens
+ usage.CompletionTokens += xunfeiResponse.Payload.Usage.Text.CompletionTokens
+ usage.TotalTokens += xunfeiResponse.Payload.Usage.Text.TotalTokens
+ case stop = <-stopChan:
+ }
+ }
+ if len(xunfeiResponse.Payload.Choices.Text) == 0 {
+		return openai.ErrorWrapper(fmt.Errorf("xunfei empty response detected"), "xunfei_empty_response_detected", http.StatusInternalServerError), nil
+ }
+ xunfeiResponse.Payload.Choices.Text[0].Content = content
-// func xunfeiHandler(c *gin.Context, textRequest GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*OpenAIErrorWithStatusCode, *Usage) {
-// domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret)
-// dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
-// if err != nil {
-// return errorWrapper(err, "make xunfei request err", http.StatusInternalServerError), nil
-// }
-// var usage Usage
-// var content string
-// var xunfeiResponse XunfeiChatResponse
-// stop := false
-// for !stop {
-// select {
-// case xunfeiResponse = <-dataChan:
-// if len(xunfeiResponse.Payload.Choices.Text) == 0 {
-// continue
-// }
-// content += xunfeiResponse.Payload.Choices.Text[0].Content
-// usage.PromptTokens += xunfeiResponse.Payload.Usage.Text.PromptTokens
-// usage.CompletionTokens += xunfeiResponse.Payload.Usage.Text.CompletionTokens
-// usage.TotalTokens += xunfeiResponse.Payload.Usage.Text.TotalTokens
-// case stop = <-stopChan:
-// }
-// }
+ response := responseXunfei2OpenAI(&xunfeiResponse)
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ _, _ = c.Writer.Write(jsonResponse)
+ return nil, &usage
+}
-// xunfeiResponse.Payload.Choices.Text[0].Content = content
+func xunfeiMakeRequest(textRequest model.GeneralOpenAIRequest, domain, authUrl, appId string) (chan ChatResponse, chan bool, error) {
+ d := websocket.Dialer{
+ HandshakeTimeout: 5 * time.Second,
+ }
+ conn, resp, err := d.Dial(authUrl, nil)
+ if err != nil || resp.StatusCode != 101 {
+ return nil, nil, err
+ }
+ data := requestOpenAI2Xunfei(textRequest, appId, domain)
+ err = conn.WriteJSON(data)
+ if err != nil {
+ return nil, nil, err
+ }
+ _, msg, err := conn.ReadMessage()
+ if err != nil {
+ return nil, nil, err
+ }
-// response := responseXunfei2OpenAI(&xunfeiResponse)
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// _, _ = c.Writer.Write(jsonResponse)
-// return nil, &usage
-// }
+ dataChan := make(chan ChatResponse)
+ stopChan := make(chan bool)
+ go func() {
+ for {
+ if msg == nil {
+ _, msg, err = conn.ReadMessage()
+ if err != nil {
+ logger.SysError("error reading stream response: " + err.Error())
+ break
+ }
+ }
+ var response ChatResponse
+ err = json.Unmarshal(msg, &response)
+ if err != nil {
+ logger.SysError("error unmarshalling stream response: " + err.Error())
+ break
+ }
+ msg = nil
+ dataChan <- response
+ if response.Payload.Choices.Status == 2 {
+ err := conn.Close()
+ if err != nil {
+ logger.SysError("error closing websocket connection: " + err.Error())
+ }
+ break
+ }
+ }
+ stopChan <- true
+ }()
-// func xunfeiMakeRequest(textRequest GeneralOpenAIRequest, domain, authUrl, appId string) (chan XunfeiChatResponse, chan bool, error) {
-// d := websocket.Dialer{
-// HandshakeTimeout: 5 * time.Second,
-// }
-// conn, resp, err := d.Dial(authUrl, nil)
-// if err != nil || resp.StatusCode != 101 {
-// return nil, nil, err
-// }
-// data := requestOpenAI2Xunfei(textRequest, appId, domain)
-// err = conn.WriteJSON(data)
-// if err != nil {
-// return nil, nil, err
-// }
+ return dataChan, stopChan, nil
+}
-// dataChan := make(chan XunfeiChatResponse)
-// stopChan := make(chan bool)
-// go func() {
-// for {
-// _, msg, err := conn.ReadMessage()
-// if err != nil {
-// common.SysError("error reading stream response: " + err.Error())
-// break
-// }
-// var response XunfeiChatResponse
-// err = json.Unmarshal(msg, &response)
-// if err != nil {
-// common.SysError("error unmarshalling stream response: " + err.Error())
-// break
-// }
-// dataChan <- response
-// if response.Payload.Choices.Status == 2 {
-// err := conn.Close()
-// if err != nil {
-// common.SysError("error closing websocket connection: " + err.Error())
-// }
-// break
-// }
-// }
-// stopChan <- true
-// }()
+func getAPIVersion(c *gin.Context, modelName string) string {
+ query := c.Request.URL.Query()
+ apiVersion := query.Get("api-version")
+ if apiVersion != "" {
+ return apiVersion
+ }
+ parts := strings.Split(modelName, "-")
+ if len(parts) == 2 {
+ apiVersion = parts[1]
+ return apiVersion
-// return dataChan, stopChan, nil
-// }
+ }
+ apiVersion = c.GetString(config.KeyAPIVersion)
+ if apiVersion != "" {
+ return apiVersion
+ }
+ apiVersion = "v1.1"
+ logger.SysLog("api_version not found, using default: " + apiVersion)
+ return apiVersion
+}
-// func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string) (string, string) {
-// query := c.Request.URL.Query()
-// apiVersion := query.Get("api-version")
-// if apiVersion == "" {
-// apiVersion = c.GetString("api_version")
-// }
-// if apiVersion == "" {
-// apiVersion = "v1.1"
-// common.SysLog("api_version not found, use default: " + apiVersion)
-// }
-// domain := "general"
-// if apiVersion != "v1.1" {
-// domain += strings.Split(apiVersion, ".")[0]
-// }
-// authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
-// return domain, authUrl
-// }
+// https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
+func apiVersion2domain(apiVersion string) string {
+ switch apiVersion {
+ case "v1.1":
+ return "general"
+ case "v2.1":
+ return "generalv2"
+ case "v3.1":
+ return "generalv3"
+ case "v3.5":
+ return "generalv3.5"
+ }
+ return "general" + apiVersion
+}
+
+func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string, modelName string) (string, string) {
+ apiVersion := getAPIVersion(c, modelName)
+ domain := apiVersion2domain(apiVersion)
+ authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
+ return domain, authUrl
+}
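
Reviewer note: the websocket auth URL built above signs "host", "date" and the request line with HMAC-SHA256 and wraps the result into an "authorization" query parameter; getXunfeiAuthUrl then pairs that URL with the domain derived from the api version (v1.1 -> general, v2.1 -> generalv2, and so on). A self-contained sketch of the URL construction, with hypothetical credentials:

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"net/url"
	"strings"
	"time"
)

// buildAuthUrl mirrors buildXunfeiAuthUrl: sign host/date/request-line with
// HMAC-SHA256 and attach host, date and the base64 authorization as query params.
func buildAuthUrl(hostUrl, apiKey, apiSecret string) (string, error) {
	u, err := url.Parse(hostUrl)
	if err != nil {
		return "", err
	}
	date := time.Now().UTC().Format(time.RFC1123)
	signString := strings.Join([]string{
		"host: " + u.Host,
		"date: " + date,
		"GET " + u.Path + " HTTP/1.1",
	}, "\n")
	mac := hmac.New(sha256.New, []byte(apiSecret))
	mac.Write([]byte(signString))
	sha := base64.StdEncoding.EncodeToString(mac.Sum(nil))
	auth := fmt.Sprintf(`hmac username="%s", algorithm="hmac-sha256", headers="host date request-line", signature="%s"`, apiKey, sha)
	v := url.Values{}
	v.Add("host", u.Host)
	v.Add("date", date)
	v.Add("authorization", base64.StdEncoding.EncodeToString([]byte(auth)))
	return hostUrl + "?" + v.Encode(), nil
}

func main() {
	// Hypothetical key/secret for illustration.
	u, err := buildAuthUrl("wss://spark-api.xf-yun.com/v3.5/chat", "example-api-key", "example-api-secret")
	if err != nil {
		panic(err)
	}
	fmt.Println(u)
}
```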
diff --git a/relay/adaptor/xunfei/model.go b/relay/adaptor/xunfei/model.go
index d10e8ad7..b99a5994 100644
--- a/relay/adaptor/xunfei/model.go
+++ b/relay/adaptor/xunfei/model.go
@@ -1,61 +1,66 @@
package xunfei
-// import (
-// "github.com/songquanpeng/one-api/relay/model"
-// )
+import (
+ "github.com/Laisky/one-api/relay/model"
+)
-// type Message struct {
-// Role string `json:"role"`
-// Content string `json:"content"`
-// }
+type Message struct {
+ Role string `json:"role"`
+ Content string `json:"content"`
+}
-// type ChatRequest struct {
-// Header struct {
-// AppId string `json:"app_id"`
-// } `json:"header"`
-// Parameter struct {
-// Chat struct {
-// Domain string `json:"domain,omitempty"`
-// Temperature float64 `json:"temperature,omitempty"`
-// TopK int `json:"top_k,omitempty"`
-// MaxTokens int `json:"max_tokens,omitempty"`
-// Auditing bool `json:"auditing,omitempty"`
-// } `json:"chat"`
-// } `json:"parameter"`
-// Payload struct {
-// Message struct {
-// Text []Message `json:"text"`
-// } `json:"message"`
-// } `json:"payload"`
-// }
+type ChatRequest struct {
+ Header struct {
+ AppId string `json:"app_id"`
+ } `json:"header"`
+ Parameter struct {
+ Chat struct {
+ Domain string `json:"domain,omitempty"`
+ Temperature float64 `json:"temperature,omitempty"`
+ TopK int `json:"top_k,omitempty"`
+ MaxTokens int `json:"max_tokens,omitempty"`
+ Auditing bool `json:"auditing,omitempty"`
+ } `json:"chat"`
+ } `json:"parameter"`
+ Payload struct {
+ Message struct {
+ Text []Message `json:"text"`
+ } `json:"message"`
+ Functions struct {
+ Text []model.Function `json:"text,omitempty"`
+ } `json:"functions,omitempty"`
+ } `json:"payload"`
+}
-// type ChatResponseTextItem struct {
-// Content string `json:"content"`
-// Role string `json:"role"`
-// Index int `json:"index"`
-// }
+type ChatResponseTextItem struct {
+ Content string `json:"content"`
+ Role string `json:"role"`
+ Index int `json:"index"`
+ ContentType string `json:"content_type"`
+ FunctionCall *model.Function `json:"function_call"`
+}
-// type ChatResponse struct {
-// Header struct {
-// Code int `json:"code"`
-// Message string `json:"message"`
-// Sid string `json:"sid"`
-// Status int `json:"status"`
-// } `json:"header"`
-// Payload struct {
-// Choices struct {
-// Status int `json:"status"`
-// Seq int `json:"seq"`
-// Text []ChatResponseTextItem `json:"text"`
-// } `json:"choices"`
-// Usage struct {
-// //Text struct {
-// // QuestionTokens string `json:"question_tokens"`
-// // PromptTokens string `json:"prompt_tokens"`
-// // CompletionTokens string `json:"completion_tokens"`
-// // TotalTokens string `json:"total_tokens"`
-// //} `json:"text"`
-// Text model.Usage `json:"text"`
-// } `json:"usage"`
-// } `json:"payload"`
-// }
+type ChatResponse struct {
+ Header struct {
+ Code int `json:"code"`
+ Message string `json:"message"`
+ Sid string `json:"sid"`
+ Status int `json:"status"`
+ } `json:"header"`
+ Payload struct {
+ Choices struct {
+ Status int `json:"status"`
+ Seq int `json:"seq"`
+ Text []ChatResponseTextItem `json:"text"`
+ } `json:"choices"`
+ Usage struct {
+ //Text struct {
+ // QuestionTokens string `json:"question_tokens"`
+ // PromptTokens string `json:"prompt_tokens"`
+ // CompletionTokens string `json:"completion_tokens"`
+ // TotalTokens string `json:"total_tokens"`
+ //} `json:"text"`
+ Text model.Usage `json:"text"`
+ } `json:"usage"`
+ } `json:"payload"`
+}
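
Reviewer note: a short sketch of how a Spark websocket frame maps onto the restored ChatResponse layout; the stream handler above treats payload.choices.status == 2 as the final frame. The sample frame is hypothetical.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed, local copy of the restored xunfei.ChatResponse for illustration.
type chatResponse struct {
	Header struct {
		Code    int    `json:"code"`
		Message string `json:"message"`
	} `json:"header"`
	Payload struct {
		Choices struct {
			Status int `json:"status"`
			Text   []struct {
				Content string `json:"content"`
			} `json:"text"`
		} `json:"choices"`
	} `json:"payload"`
}

func main() {
	frame := []byte(`{"header":{"code":0,"message":"Success"},"payload":{"choices":{"status":2,"text":[{"content":"Hello"}]}}}`)
	var resp chatResponse
	if err := json.Unmarshal(frame, &resp); err != nil {
		panic(err)
	}
	fmt.Println(resp.Payload.Choices.Text[0].Content, "final frame:", resp.Payload.Choices.Status == 2)
}
```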
diff --git a/relay/adaptor/zhipu/adaptor.go b/relay/adaptor/zhipu/adaptor.go
index 424fabd6..be70cb01 100644
--- a/relay/adaptor/zhipu/adaptor.go
+++ b/relay/adaptor/zhipu/adaptor.go
@@ -1,145 +1,146 @@
package zhipu
-// import (
-// "github.com/Laisky/errors/v2"
-// "fmt"
-// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/relay/adaptor"
-// "github.com/songquanpeng/one-api/relay/adaptor/openai"
-// "github.com/songquanpeng/one-api/relay/meta"
-// "github.com/songquanpeng/one-api/relay/model"
-// "github.com/songquanpeng/one-api/relay/relaymode"
-// "io"
-// "math"
-// "net/http"
-// "strings"
-// )
+import (
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "net/http"
+ "strings"
-// type Adaptor struct {
-// APIVersion string
-// }
+ "github.com/Laisky/one-api/relay/adaptor"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
+ "github.com/gin-gonic/gin"
+)
-// func (a *Adaptor) Init(meta *meta.Meta) {
+type Adaptor struct {
+ APIVersion string
+}
-// }
+func (a *Adaptor) Init(meta *meta.Meta) {
-// func (a *Adaptor) SetVersionByModeName(modelName string) {
-// if strings.HasPrefix(modelName, "glm-") {
-// a.APIVersion = "v4"
-// } else {
-// a.APIVersion = "v3"
-// }
-// }
+}
-// func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
-// switch meta.Mode {
-// case relaymode.ImagesGenerations:
-// return fmt.Sprintf("%s/api/paas/v4/images/generations", meta.BaseURL), nil
-// case relaymode.Embeddings:
-// return fmt.Sprintf("%s/api/paas/v4/embeddings", meta.BaseURL), nil
-// }
-// a.SetVersionByModeName(meta.ActualModelName)
-// if a.APIVersion == "v4" {
-// return fmt.Sprintf("%s/api/paas/v4/chat/completions", meta.BaseURL), nil
-// }
-// method := "invoke"
-// if meta.IsStream {
-// method = "sse-invoke"
-// }
-// return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", meta.BaseURL, meta.ActualModelName, method), nil
-// }
+func (a *Adaptor) SetVersionByModeName(modelName string) {
+ if strings.HasPrefix(modelName, "glm-") {
+ a.APIVersion = "v4"
+ } else {
+ a.APIVersion = "v3"
+ }
+}
-// func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
-// adaptor.SetupCommonRequestHeader(c, req, meta)
-// token := GetToken(meta.APIKey)
-// req.Header.Set("Authorization", token)
-// return nil
-// }
+func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
+ switch meta.Mode {
+ case relaymode.ImagesGenerations:
+ return fmt.Sprintf("%s/api/paas/v4/images/generations", meta.BaseURL), nil
+ case relaymode.Embeddings:
+ return fmt.Sprintf("%s/api/paas/v4/embeddings", meta.BaseURL), nil
+ }
+ a.SetVersionByModeName(meta.ActualModelName)
+ if a.APIVersion == "v4" {
+ return fmt.Sprintf("%s/api/paas/v4/chat/completions", meta.BaseURL), nil
+ }
+ method := "invoke"
+ if meta.IsStream {
+ method = "sse-invoke"
+ }
+ return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", meta.BaseURL, meta.ActualModelName, method), nil
+}
-// func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
-// switch relayMode {
-// case relaymode.Embeddings:
-// baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
-// return baiduEmbeddingRequest, nil
-// default:
-// // TopP (0.0, 1.0)
-// request.TopP = math.Min(0.99, request.TopP)
-// request.TopP = math.Max(0.01, request.TopP)
+func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+ adaptor.SetupCommonRequestHeader(c, req, meta)
+ token := GetToken(meta.APIKey)
+ req.Header.Set("Authorization", token)
+ return nil
+}
-// // Temperature (0.0, 1.0)
-// request.Temperature = math.Min(0.99, request.Temperature)
-// request.Temperature = math.Max(0.01, request.Temperature)
-// a.SetVersionByModeName(request.Model)
-// if a.APIVersion == "v4" {
-// return request, nil
-// }
-// return ConvertRequest(*request), nil
-// }
-// }
+func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ switch relayMode {
+ case relaymode.Embeddings:
+ baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
+ return baiduEmbeddingRequest, nil
+ default:
+ // TopP (0.0, 1.0)
+ request.TopP = math.Min(0.99, request.TopP)
+ request.TopP = math.Max(0.01, request.TopP)
-// func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
-// if request == nil {
-// return nil, errors.New("request is nil")
-// }
-// newRequest := ImageRequest{
-// Model: request.Model,
-// Prompt: request.Prompt,
-// UserId: request.User,
-// }
-// return newRequest, nil
-// }
+ // Temperature (0.0, 1.0)
+ request.Temperature = math.Min(0.99, request.Temperature)
+ request.Temperature = math.Max(0.01, request.Temperature)
+ a.SetVersionByModeName(request.Model)
+ if a.APIVersion == "v4" {
+ return request, nil
+ }
+ return ConvertRequest(*request), nil
+ }
+}
-// func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
-// return adaptor.DoRequestHelper(a, c, meta, requestBody)
-// }
+func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+ if request == nil {
+ return nil, errors.New("request is nil")
+ }
+ newRequest := ImageRequest{
+ Model: request.Model,
+ Prompt: request.Prompt,
+ UserId: request.User,
+ }
+ return newRequest, nil
+}
-// func (a *Adaptor) DoResponseV4(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-// if meta.IsStream {
-// err, _, usage = openai.StreamHandler(c, resp, meta.Mode)
-// } else {
-// err, usage = openai.Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
-// }
-// return
-// }
+func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+ return adaptor.DoRequestHelper(a, c, meta, requestBody)
+}
-// func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-// switch meta.Mode {
-// case relaymode.Embeddings:
-// err, usage = EmbeddingsHandler(c, resp)
-// return
-// case relaymode.ImagesGenerations:
-// err, usage = openai.ImageHandler(c, resp)
-// return
-// }
-// if a.APIVersion == "v4" {
-// return a.DoResponseV4(c, resp, meta)
-// }
-// if meta.IsStream {
-// err, usage = StreamHandler(c, resp)
-// } else {
-// if meta.Mode == relaymode.Embeddings {
-// err, usage = EmbeddingsHandler(c, resp)
-// } else {
-// err, usage = Handler(c, resp)
-// }
-// }
-// return
-// }
+func (a *Adaptor) DoResponseV4(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+ if meta.IsStream {
+ err, _, usage = openai.StreamHandler(c, resp, meta.Mode)
+ } else {
+ err, usage = openai.Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
+ }
+ return
+}
-// func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
-// return &EmbeddingRequest{
-// Model: "embedding-2",
-// Input: request.Input.(string),
-// }
-// }
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+ switch meta.Mode {
+ case relaymode.Embeddings:
+ err, usage = EmbeddingsHandler(c, resp)
+ return
+ case relaymode.ImagesGenerations:
+ err, usage = openai.ImageHandler(c, resp)
+ return
+ }
+ if a.APIVersion == "v4" {
+ return a.DoResponseV4(c, resp, meta)
+ }
+ if meta.IsStream {
+ err, usage = StreamHandler(c, resp)
+ } else {
+ if meta.Mode == relaymode.Embeddings {
+ err, usage = EmbeddingsHandler(c, resp)
+ } else {
+ err, usage = Handler(c, resp)
+ }
+ }
+ return
+}
-// func (a *Adaptor) GetModelList() []string {
-// return ModelList
-// }
+func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
+ return &EmbeddingRequest{
+ Model: "embedding-2",
+ Input: request.Input.(string),
+ }
+}
-// func (a *Adaptor) GetChannelName() string {
-// return "zhipu"
-// }
+func (a *Adaptor) GetModelList() []string {
+ return ModelList
+}
+
+func (a *Adaptor) GetChannelName() string {
+ return "zhipu"
+}
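
Reviewer note: two details of the restored zhipu adaptor worth calling out — request URLs switch between the v4 chat endpoint (glm-* models) and the legacy v3 model-api endpoint, and TopP/Temperature are clamped into (0.01, 0.99) for the v3 path. A minimal sketch of both, with a hypothetical base URL:

```go
package main

import (
	"fmt"
	"math"
	"strings"
)

// requestURL mirrors GetRequestURL for the chat case: glm-* models go to the
// v4 endpoint, everything else to the v3 model-api invoke/sse-invoke endpoint.
func requestURL(baseURL, modelName string, stream bool) string {
	if strings.HasPrefix(modelName, "glm-") {
		return baseURL + "/api/paas/v4/chat/completions"
	}
	method := "invoke"
	if stream {
		method = "sse-invoke"
	}
	return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", baseURL, modelName, method)
}

// clamp mirrors the TopP/Temperature restriction applied in ConvertRequest.
func clamp(v float64) float64 {
	return math.Max(0.01, math.Min(0.99, v))
}

func main() {
	fmt.Println(requestURL("https://open.bigmodel.cn", "glm-4", false))
	fmt.Println(requestURL("https://open.bigmodel.cn", "chatglm_turbo", true))
	fmt.Println(clamp(1.7), clamp(0.0))
}
```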
diff --git a/relay/adaptor/zhipu/constants.go b/relay/adaptor/zhipu/constants.go
index acbc5c91..e1192123 100644
--- a/relay/adaptor/zhipu/constants.go
+++ b/relay/adaptor/zhipu/constants.go
@@ -1,5 +1,7 @@
package zhipu
-// var ModelList = []string{
-// "chatglm_turbo", "chatglm_pro", "chatglm_std", "chatglm_lite",
-// }
+var ModelList = []string{
+ "chatglm_turbo", "chatglm_pro", "chatglm_std", "chatglm_lite",
+ "glm-4", "glm-4v", "glm-3-turbo", "embedding-2",
+ "cogview-3",
+}
diff --git a/relay/adaptor/zhipu/main.go b/relay/adaptor/zhipu/main.go
index 8340abc9..b582a6d3 100644
--- a/relay/adaptor/zhipu/main.go
+++ b/relay/adaptor/zhipu/main.go
@@ -1,301 +1,304 @@
package zhipu
-// import (
-// "bufio"
-// "encoding/json"
-// "github.com/gin-gonic/gin"
-// "github.com/golang-jwt/jwt"
-// "io"
-// "net/http"
-// "one-api/common"
-// "strings"
-// "sync"
-// "time"
-// )
+import (
+ "bufio"
+ "encoding/json"
+ "io"
+ "net/http"
+ "strings"
+ "sync"
+ "time"
-// // https://open.bigmodel.cn/doc/api#chatglm_std
-// // chatglm_std, chatglm_lite
-// // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
-// // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/helper"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/constant"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/gin-gonic/gin"
+ "github.com/golang-jwt/jwt"
+)
-// type ZhipuMessage struct {
-// Role string `json:"role"`
-// Content string `json:"content"`
-// }
+// https://open.bigmodel.cn/doc/api#chatglm_std
+// chatglm_std, chatglm_lite
+// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
+// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke
-// type ZhipuRequest struct {
-// Prompt []ZhipuMessage `json:"prompt"`
-// Temperature float64 `json:"temperature,omitempty"`
-// TopP float64 `json:"top_p,omitempty"`
-// RequestId string `json:"request_id,omitempty"`
-// Incremental bool `json:"incremental,omitempty"`
-// }
+var zhipuTokens sync.Map
+var expSeconds int64 = 24 * 3600
-// type ZhipuResponseData struct {
-// TaskId string `json:"task_id"`
-// RequestId string `json:"request_id"`
-// TaskStatus string `json:"task_status"`
-// Choices []ZhipuMessage `json:"choices"`
-// Usage `json:"usage"`
-// }
+func GetToken(apikey string) string {
+ data, ok := zhipuTokens.Load(apikey)
+ if ok {
+ tokenData := data.(tokenData)
+ if time.Now().Before(tokenData.ExpiryTime) {
+ return tokenData.Token
+ }
+ }
-// type ZhipuResponse struct {
-// Code int `json:"code"`
-// Msg string `json:"msg"`
-// Success bool `json:"success"`
-// Data ZhipuResponseData `json:"data"`
-// }
+ split := strings.Split(apikey, ".")
+ if len(split) != 2 {
+ logger.SysError("invalid zhipu key: " + apikey)
+ return ""
+ }
-// type ZhipuStreamMetaResponse struct {
-// RequestId string `json:"request_id"`
-// TaskId string `json:"task_id"`
-// TaskStatus string `json:"task_status"`
-// Usage `json:"usage"`
-// }
+ id := split[0]
+ secret := split[1]
-// type zhipuTokenData struct {
-// Token string
-// ExpiryTime time.Time
-// }
+ expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
+ expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)
-// var zhipuTokens sync.Map
-// var expSeconds int64 = 24 * 3600
+ timestamp := time.Now().UnixNano() / 1e6
-// func getZhipuToken(apikey string) string {
-// data, ok := zhipuTokens.Load(apikey)
-// if ok {
-// tokenData := data.(zhipuTokenData)
-// if time.Now().Before(tokenData.ExpiryTime) {
-// return tokenData.Token
-// }
-// }
+ payload := jwt.MapClaims{
+ "api_key": id,
+ "exp": expMillis,
+ "timestamp": timestamp,
+ }
-// split := strings.Split(apikey, ".")
-// if len(split) != 2 {
-// common.SysError("invalid zhipu key: " + apikey)
-// return ""
-// }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)
-// id := split[0]
-// secret := split[1]
+ token.Header["alg"] = "HS256"
+ token.Header["sign_type"] = "SIGN"
-// expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
-// expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)
+ tokenString, err := token.SignedString([]byte(secret))
+ if err != nil {
+ return ""
+ }
-// timestamp := time.Now().UnixNano() / 1e6
+ zhipuTokens.Store(apikey, tokenData{
+ Token: tokenString,
+ ExpiryTime: expiryTime,
+ })
-// payload := jwt.MapClaims{
-// "api_key": id,
-// "exp": expMillis,
-// "timestamp": timestamp,
-// }
+ return tokenString
+}
-// token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)
+func ConvertRequest(request model.GeneralOpenAIRequest) *Request {
+ messages := make([]Message, 0, len(request.Messages))
+ for _, message := range request.Messages {
+ messages = append(messages, Message{
+ Role: message.Role,
+ Content: message.StringContent(),
+ })
+ }
+ return &Request{
+ Prompt: messages,
+ Temperature: request.Temperature,
+ TopP: request.TopP,
+ Incremental: false,
+ }
+}
-// token.Header["alg"] = "HS256"
-// token.Header["sign_type"] = "SIGN"
+func responseZhipu2OpenAI(response *Response) *openai.TextResponse {
+ fullTextResponse := openai.TextResponse{
+ Id: response.Data.TaskId,
+ Object: "chat.completion",
+ Created: helper.GetTimestamp(),
+ Choices: make([]openai.TextResponseChoice, 0, len(response.Data.Choices)),
+ Usage: response.Data.Usage,
+ }
+ for i, choice := range response.Data.Choices {
+ openaiChoice := openai.TextResponseChoice{
+ Index: i,
+ Message: model.Message{
+ Role: choice.Role,
+ Content: strings.Trim(choice.Content, "\""),
+ },
+ FinishReason: "",
+ }
+ if i == len(response.Data.Choices)-1 {
+ openaiChoice.FinishReason = "stop"
+ }
+ fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice)
+ }
+ return &fullTextResponse
+}
-// tokenString, err := token.SignedString([]byte(secret))
-// if err != nil {
-// return ""
-// }
+func streamResponseZhipu2OpenAI(zhipuResponse string) *openai.ChatCompletionsStreamResponse {
+ var choice openai.ChatCompletionsStreamResponseChoice
+ choice.Delta.Content = zhipuResponse
+ response := openai.ChatCompletionsStreamResponse{
+ Object: "chat.completion.chunk",
+ Created: helper.GetTimestamp(),
+ Model: "chatglm",
+ Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
+ }
+ return &response
+}
-// zhipuTokens.Store(apikey, zhipuTokenData{
-// Token: tokenString,
-// ExpiryTime: expiryTime,
-// })
+func streamMetaResponseZhipu2OpenAI(zhipuResponse *StreamMetaResponse) (*openai.ChatCompletionsStreamResponse, *model.Usage) {
+ var choice openai.ChatCompletionsStreamResponseChoice
+ choice.Delta.Content = ""
+ choice.FinishReason = &constant.StopFinishReason
+ response := openai.ChatCompletionsStreamResponse{
+ Id: zhipuResponse.RequestId,
+ Object: "chat.completion.chunk",
+ Created: helper.GetTimestamp(),
+ Model: "chatglm",
+ Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
+ }
+ return &response, &zhipuResponse.Usage
+}
-// return tokenString
-// }
+func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var usage *model.Usage
+ scanner := bufio.NewScanner(resp.Body)
+ scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Index(string(data), ":") >= 0 {
+ return i + 2, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ })
+ dataChan := make(chan string)
+ metaChan := make(chan string)
+ stopChan := make(chan bool)
+ go func() {
+ for scanner.Scan() {
+ data := scanner.Text()
+ lines := strings.Split(data, "\n")
+ for i, line := range lines {
+ if len(line) < 5 {
+ continue
+ }
+ if line[:5] == "data:" {
+ dataChan <- line[5:]
+ if i != len(lines)-1 {
+ dataChan <- "\n"
+ }
+ } else if line[:5] == "meta:" {
+ metaChan <- line[5:]
+ }
+ }
+ }
+ stopChan <- true
+ }()
+ common.SetEventStreamHeaders(c)
+ c.Stream(func(w io.Writer) bool {
+ select {
+ case data := <-dataChan:
+ response := streamResponseZhipu2OpenAI(data)
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ logger.SysError("error marshalling stream response: " + err.Error())
+ return true
+ }
+ c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
+ return true
+ case data := <-metaChan:
+ var zhipuResponse StreamMetaResponse
+ err := json.Unmarshal([]byte(data), &zhipuResponse)
+ if err != nil {
+ logger.SysError("error unmarshalling stream response: " + err.Error())
+ return true
+ }
+ response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
+ jsonResponse, err := json.Marshal(response)
+ if err != nil {
+ logger.SysError("error marshalling stream response: " + err.Error())
+ return true
+ }
+ usage = zhipuUsage
+ c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
+ return true
+ case <-stopChan:
+ c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
+ return false
+ }
+ })
+ err := resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ return nil, usage
+}
-// func requestOpenAI2Zhipu(request GeneralOpenAIRequest) *ZhipuRequest {
-// messages := make([]ZhipuMessage, 0, len(request.Messages))
-// for _, message := range request.Messages {
-// if message.Role == "system" {
-// messages = append(messages, ZhipuMessage{
-// Role: "system",
-// Content: message.Content,
-// })
-// messages = append(messages, ZhipuMessage{
-// Role: "user",
-// Content: "Okay",
-// })
-// } else {
-// messages = append(messages, ZhipuMessage{
-// Role: message.Role,
-// Content: message.Content,
-// })
-// }
-// }
-// return &ZhipuRequest{
-// Prompt: messages,
-// Temperature: request.Temperature,
-// TopP: request.TopP,
-// Incremental: false,
-// }
-// }
+func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var zhipuResponse Response
+ responseBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = json.Unmarshal(responseBody, &zhipuResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ if !zhipuResponse.Success {
+ return &model.ErrorWithStatusCode{
+ Error: model.Error{
+ Message: zhipuResponse.Msg,
+ Type: "zhipu_error",
+ Param: "",
+ Code: zhipuResponse.Code,
+ },
+ StatusCode: resp.StatusCode,
+ }, nil
+ }
+ fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
+ fullTextResponse.Model = "chatglm"
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+ _, err = c.Writer.Write(jsonResponse)
+ return nil, &fullTextResponse.Usage
+}
-// func responseZhipu2OpenAI(response *ZhipuResponse) *OpenAITextResponse {
-// fullTextResponse := OpenAITextResponse{
-// Id: response.Data.TaskId,
-// Object: "chat.completion",
-// Created: common.GetTimestamp(),
-// Choices: make([]OpenAITextResponseChoice, 0, len(response.Data.Choices)),
-// Usage: response.Data.Usage,
-// }
-// for i, choice := range response.Data.Choices {
-// openaiChoice := OpenAITextResponseChoice{
-// Index: i,
-// Message: Message{
-// Role: choice.Role,
-// Content: strings.Trim(choice.Content, "\""),
-// },
-// FinishReason: "",
-// }
-// if i == len(response.Data.Choices)-1 {
-// openaiChoice.FinishReason = "stop"
-// }
-// fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice)
-// }
-// return &fullTextResponse
-// }
+func EmbeddingsHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+ var zhipuResponse EmbeddingResponse
+ responseBody, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = resp.Body.Close()
+ if err != nil {
+ return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+ }
+ err = json.Unmarshal(responseBody, &zhipuResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ fullTextResponse := embeddingResponseZhipu2OpenAI(&zhipuResponse)
+ jsonResponse, err := json.Marshal(fullTextResponse)
+ if err != nil {
+ return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+ }
+ c.Writer.Header().Set("Content-Type", "application/json")
+ c.Writer.WriteHeader(resp.StatusCode)
+ _, err = c.Writer.Write(jsonResponse)
+ return nil, &fullTextResponse.Usage
+}
-// func streamResponseZhipu2OpenAI(zhipuResponse string) *ChatCompletionsStreamResponse {
-// var choice ChatCompletionsStreamResponseChoice
-// choice.Delta.Content = zhipuResponse
-// response := ChatCompletionsStreamResponse{
-// Object: "chat.completion.chunk",
-// Created: common.GetTimestamp(),
-// Model: "chatglm",
-// Choices: []ChatCompletionsStreamResponseChoice{choice},
-// }
-// return &response
-// }
+func embeddingResponseZhipu2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
+ openAIEmbeddingResponse := openai.EmbeddingResponse{
+ Object: "list",
+ Data: make([]openai.EmbeddingResponseItem, 0, len(response.Embeddings)),
+ Model: response.Model,
+ Usage: model.Usage{
+ PromptTokens: response.PromptTokens,
+ CompletionTokens: response.CompletionTokens,
+ TotalTokens: response.Usage.TotalTokens,
+ },
+ }
-// func streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStreamMetaResponse) (*ChatCompletionsStreamResponse, *Usage) {
-// var choice ChatCompletionsStreamResponseChoice
-// choice.Delta.Content = ""
-// choice.FinishReason = &stopFinishReason
-// response := ChatCompletionsStreamResponse{
-// Id: zhipuResponse.RequestId,
-// Object: "chat.completion.chunk",
-// Created: common.GetTimestamp(),
-// Model: "chatglm",
-// Choices: []ChatCompletionsStreamResponseChoice{choice},
-// }
-// return &response, &zhipuResponse.Usage
-// }
-
-// func zhipuStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var usage *Usage
-// scanner := bufio.NewScanner(resp.Body)
-// scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-// if atEOF && len(data) == 0 {
-// return 0, nil, nil
-// }
-// if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Index(string(data), ":") >= 0 {
-// return i + 2, data[0:i], nil
-// }
-// if atEOF {
-// return len(data), data, nil
-// }
-// return 0, nil, nil
-// })
-// dataChan := make(chan string)
-// metaChan := make(chan string)
-// stopChan := make(chan bool)
-// go func() {
-// for scanner.Scan() {
-// data := scanner.Text()
-// lines := strings.Split(data, "\n")
-// for i, line := range lines {
-// if len(line) < 5 {
-// continue
-// }
-// if line[:5] == "data:" {
-// dataChan <- line[5:]
-// if i != len(lines)-1 {
-// dataChan <- "\n"
-// }
-// } else if line[:5] == "meta:" {
-// metaChan <- line[5:]
-// }
-// }
-// }
-// stopChan <- true
-// }()
-// setEventStreamHeaders(c)
-// c.Stream(func(w io.Writer) bool {
-// select {
-// case data := <-dataChan:
-// response := streamResponseZhipu2OpenAI(data)
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// common.SysError("error marshalling stream response: " + err.Error())
-// return true
-// }
-// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-// return true
-// case data := <-metaChan:
-// var zhipuResponse ZhipuStreamMetaResponse
-// err := json.Unmarshal([]byte(data), &zhipuResponse)
-// if err != nil {
-// common.SysError("error unmarshalling stream response: " + err.Error())
-// return true
-// }
-// response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
-// jsonResponse, err := json.Marshal(response)
-// if err != nil {
-// common.SysError("error marshalling stream response: " + err.Error())
-// return true
-// }
-// usage = zhipuUsage
-// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-// return true
-// case <-stopChan:
-// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-// return false
-// }
-// })
-// err := resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// return nil, usage
-// }
-
-// func zhipuHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
-// var zhipuResponse ZhipuResponse
-// responseBody, err := io.ReadAll(resp.Body)
-// if err != nil {
-// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = resp.Body.Close()
-// if err != nil {
-// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-// }
-// err = json.Unmarshal(responseBody, &zhipuResponse)
-// if err != nil {
-// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// if !zhipuResponse.Success {
-// return &OpenAIErrorWithStatusCode{
-// OpenAIError: OpenAIError{
-// Message: zhipuResponse.Msg,
-// Type: "zhipu_error",
-// Param: "",
-// Code: zhipuResponse.Code,
-// },
-// StatusCode: resp.StatusCode,
-// }, nil
-// }
-// fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
-// jsonResponse, err := json.Marshal(fullTextResponse)
-// if err != nil {
-// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
-// }
-// c.Writer.Header().Set("Content-Type", "application/json")
-// c.Writer.WriteHeader(resp.StatusCode)
-// _, err = c.Writer.Write(jsonResponse)
-// return nil, &fullTextResponse.Usage
-// }
+ for _, item := range response.Embeddings {
+ openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
+ Object: `embedding`,
+ Index: item.Index,
+ Embedding: item.Embedding,
+ })
+ }
+ return &openAIEmbeddingResponse
+}
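
Reviewer note: GetToken above caches a signed JWT per key, where a zhipu key has the form "<id>.<secret>" and the token carries api_key, exp (milliseconds) and timestamp (milliseconds) claims signed with HS256 plus the "sign_type: SIGN" header. A standalone sketch of that token construction, without the cache, using a hypothetical key:

```go
package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/golang-jwt/jwt"
)

// buildToken mirrors the JWT scheme in GetToken: split the key into id and
// secret, then sign {api_key, exp, timestamp} with HS256.
func buildToken(apikey string, ttl time.Duration) (string, error) {
	parts := strings.Split(apikey, ".")
	if len(parts) != 2 {
		return "", fmt.Errorf("invalid zhipu key")
	}
	nowMs := time.Now().UnixNano() / 1e6
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"api_key":   parts[0],
		"exp":       nowMs + ttl.Milliseconds(),
		"timestamp": nowMs,
	})
	token.Header["alg"] = "HS256"
	token.Header["sign_type"] = "SIGN"
	return token.SignedString([]byte(parts[1]))
}

func main() {
	s, err := buildToken("example-id.example-secret", 24*time.Hour)
	if err != nil {
		panic(err)
	}
	fmt.Println(s)
}
```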
diff --git a/relay/adaptor/zhipu/model.go b/relay/adaptor/zhipu/model.go
index c2782aab..02e9ebde 100644
--- a/relay/adaptor/zhipu/model.go
+++ b/relay/adaptor/zhipu/model.go
@@ -1,46 +1,71 @@
package zhipu
-// import (
-// "github.com/songquanpeng/one-api/relay/model"
-// "time"
-// )
+import (
+ "time"
-// type Message struct {
-// Role string `json:"role"`
-// Content string `json:"content"`
-// }
+ "github.com/Laisky/one-api/relay/model"
+)
-// type Request struct {
-// Prompt []Message `json:"prompt"`
-// Temperature float64 `json:"temperature,omitempty"`
-// TopP float64 `json:"top_p,omitempty"`
-// RequestId string `json:"request_id,omitempty"`
-// Incremental bool `json:"incremental,omitempty"`
-// }
+type Message struct {
+ Role string `json:"role"`
+ Content string `json:"content"`
+}
-// type ResponseData struct {
-// TaskId string `json:"task_id"`
-// RequestId string `json:"request_id"`
-// TaskStatus string `json:"task_status"`
-// Choices []Message `json:"choices"`
-// model.Usage `json:"usage"`
-// }
+type Request struct {
+ Prompt []Message `json:"prompt"`
+ Temperature float64 `json:"temperature,omitempty"`
+ TopP float64 `json:"top_p,omitempty"`
+ RequestId string `json:"request_id,omitempty"`
+ Incremental bool `json:"incremental,omitempty"`
+}
-// type Response struct {
-// Code int `json:"code"`
-// Msg string `json:"msg"`
-// Success bool `json:"success"`
-// Data ResponseData `json:"data"`
-// }
+type ResponseData struct {
+ TaskId string `json:"task_id"`
+ RequestId string `json:"request_id"`
+ TaskStatus string `json:"task_status"`
+ Choices []Message `json:"choices"`
+ model.Usage `json:"usage"`
+}
-// type StreamMetaResponse struct {
-// RequestId string `json:"request_id"`
-// TaskId string `json:"task_id"`
-// TaskStatus string `json:"task_status"`
-// model.Usage `json:"usage"`
-// }
+type Response struct {
+ Code int `json:"code"`
+ Msg string `json:"msg"`
+ Success bool `json:"success"`
+ Data ResponseData `json:"data"`
+}
-// type tokenData struct {
-// Token string
-// ExpiryTime time.Time
-// }
+type StreamMetaResponse struct {
+ RequestId string `json:"request_id"`
+ TaskId string `json:"task_id"`
+ TaskStatus string `json:"task_status"`
+ model.Usage `json:"usage"`
+}
+
+type tokenData struct {
+ Token string
+ ExpiryTime time.Time
+}
+
+type EmbeddingRequest struct {
+ Model string `json:"model"`
+ Input string `json:"input"`
+}
+
+type EmbeddingResponse struct {
+ Model string `json:"model"`
+ Object string `json:"object"`
+ Embeddings []EmbeddingData `json:"data"`
+ model.Usage `json:"usage"`
+}
+
+type EmbeddingData struct {
+ Index int `json:"index"`
+ Object string `json:"object"`
+ Embedding []float64 `json:"embedding"`
+}
+
+type ImageRequest struct {
+ Model string `json:"model"`
+ Prompt string `json:"prompt"`
+ UserId string `json:"user_id,omitempty"`
+}
diff --git a/relay/billing/billing.go b/relay/billing/billing.go
index a99d37ee..d22bce70 100644
--- a/relay/billing/billing.go
+++ b/relay/billing/billing.go
@@ -3,8 +3,8 @@ package billing
import (
"context"
"fmt"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/model"
)
func ReturnPreConsumedQuota(ctx context.Context, preConsumedQuota int64, tokenId int) {
diff --git a/relay/billing/ratio/group.go b/relay/billing/ratio/group.go
index 8e9c5b73..00813ae9 100644
--- a/relay/billing/ratio/group.go
+++ b/relay/billing/ratio/group.go
@@ -2,7 +2,7 @@ package ratio
import (
"encoding/json"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common/logger"
)
var GroupRatio = map[string]float64{
diff --git a/relay/billing/ratio/model.go b/relay/billing/ratio/model.go
index fdabeddc..3b7bc3c5 100644
--- a/relay/billing/ratio/model.go
+++ b/relay/billing/ratio/model.go
@@ -4,7 +4,7 @@ import (
"encoding/json"
"strings"
- "github.com/songquanpeng/one-api/common/logger"
+ "github.com/Laisky/one-api/common/logger"
)
const (
diff --git a/relay/channel/tencent/main.go b/relay/channel/tencent/main.go
index aa87e9ce..3fef2c6d 100644
--- a/relay/channel/tencent/main.go
+++ b/relay/channel/tencent/main.go
@@ -9,12 +9,12 @@ package tencent
// "github.com/Laisky/errors/v2"
// "fmt"
// "github.com/gin-gonic/gin"
-// "github.com/songquanpeng/one-api/common"
-// "github.com/songquanpeng/one-api/common/helper"
-// "github.com/songquanpeng/one-api/common/logger"
-// "github.com/songquanpeng/one-api/relay/channel/openai"
-// "github.com/songquanpeng/one-api/relay/constant"
-// "github.com/songquanpeng/one-api/relay/model"
+// "github.com/Laisky/one-api/common"
+// "github.com/Laisky/one-api/common/helper"
+// "github.com/Laisky/one-api/common/logger"
+// "github.com/Laisky/one-api/relay/channel/openai"
+// "github.com/Laisky/one-api/relay/constant"
+// "github.com/Laisky/one-api/relay/model"
// "io"
// "net/http"
// "sort"
diff --git a/relay/channeltype/helper.go b/relay/channeltype/helper.go
index 89e40142..50b6b524 100644
--- a/relay/channeltype/helper.go
+++ b/relay/channeltype/helper.go
@@ -1,6 +1,6 @@
package channeltype
-import "github.com/songquanpeng/one-api/relay/apitype"
+import "github.com/Laisky/one-api/relay/apitype"
func ToAPIType(channelType int) int {
apiType := apitype.OpenAI
diff --git a/relay/client/init.go b/relay/client/init.go
index 73108700..62661b32 100644
--- a/relay/client/init.go
+++ b/relay/client/init.go
@@ -6,7 +6,7 @@ import (
"time"
gutils "github.com/Laisky/go-utils/v4"
- "github.com/songquanpeng/one-api/common/config"
+ "github.com/Laisky/one-api/common/config"
)
var HTTPClient *http.Client
diff --git a/relay/controller/audio.go b/relay/controller/audio.go
index 58094c22..9f7970be 100644
--- a/relay/controller/audio.go
+++ b/relay/controller/audio.go
@@ -7,19 +7,19 @@ import (
"encoding/json"
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/relay/adaptor/azure"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/billing"
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/client"
+ relaymodel "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/relay/adaptor/azure"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/billing"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/client"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
"io"
"net/http"
"strings"
diff --git a/relay/controller/error.go b/relay/controller/error.go
index 69ece3ec..298c611d 100644
--- a/relay/controller/error.go
+++ b/relay/controller/error.go
@@ -3,9 +3,9 @@ package controller
import (
"encoding/json"
"fmt"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/relay/model"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay/model"
"io"
"net/http"
"strconv"
diff --git a/relay/controller/helper.go b/relay/controller/helper.go
index e07aba89..62054049 100644
--- a/relay/controller/helper.go
+++ b/relay/controller/helper.go
@@ -4,18 +4,18 @@ import (
"context"
"fmt"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/controller/validator"
+ "github.com/Laisky/one-api/relay/meta"
+ relaymodel "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/controller/validator"
- "github.com/songquanpeng/one-api/relay/meta"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
"math"
"net/http"
)
diff --git a/relay/controller/image.go b/relay/controller/image.go
index 4079e450..822b8557 100644
--- a/relay/controller/image.go
+++ b/relay/controller/image.go
@@ -9,15 +9,15 @@ import (
"net/http"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/model"
+ "github.com/Laisky/one-api/relay"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/meta"
+ relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/model"
- "github.com/songquanpeng/one-api/relay"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/meta"
- relaymodel "github.com/songquanpeng/one-api/relay/model"
)
func isWithinRange(element string, value int) bool {
diff --git a/relay/controller/text.go b/relay/controller/text.go
index d3ae6644..6e700c25 100644
--- a/relay/controller/text.go
+++ b/relay/controller/text.go
@@ -3,22 +3,21 @@ package controller
import (
"bytes"
"encoding/json"
- "fmt"
"io"
"net/http"
"strings"
"github.com/Laisky/errors/v2"
+ "github.com/Laisky/one-api/common/logger"
+ "github.com/Laisky/one-api/relay"
+ "github.com/Laisky/one-api/relay/adaptor/openai"
+ "github.com/Laisky/one-api/relay/apitype"
+ "github.com/Laisky/one-api/relay/billing"
+ billingratio "github.com/Laisky/one-api/relay/billing/ratio"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/meta"
+ "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/logger"
- "github.com/songquanpeng/one-api/relay"
- "github.com/songquanpeng/one-api/relay/adaptor/openai"
- "github.com/songquanpeng/one-api/relay/apitype"
- "github.com/songquanpeng/one-api/relay/billing"
- billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/meta"
- "github.com/songquanpeng/one-api/relay/model"
)
func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
@@ -95,14 +94,11 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
}
if resp != nil {
- errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && resp.Header.Get("Content-Type") == "application/json")
+ errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json"))
if errorHappened {
billing.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
- logger.Error(ctx, fmt.Sprintf("relay text [%d] <- %q %q",
- resp.StatusCode, resp.Request.URL.String(), string(requestBodyBytes)))
return RelayErrorHandler(resp)
}
- meta.IsStream = meta.IsStream || strings.HasPrefix(resp.Header.Get("Content-Type"), "text/event-stream")
}
// do response
diff --git a/relay/controller/validator/validation.go b/relay/controller/validator/validation.go
index 3aab6ac8..60260749 100644
--- a/relay/controller/validator/validation.go
+++ b/relay/controller/validator/validation.go
@@ -4,8 +4,8 @@ import (
"math"
"github.com/Laisky/errors/v2"
- "github.com/songquanpeng/one-api/relay/model"
- "github.com/songquanpeng/one-api/relay/relaymode"
+ "github.com/Laisky/one-api/relay/model"
+ "github.com/Laisky/one-api/relay/relaymode"
)
func ValidateTextRequest(textRequest *model.GeneralOpenAIRequest, relayMode int) error {
diff --git a/relay/meta/relay_meta.go b/relay/meta/relay_meta.go
index a17aa0f0..788f6768 100644
--- a/relay/meta/relay_meta.go
+++ b/relay/meta/relay_meta.go
@@ -1,11 +1,11 @@
package meta
import (
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/relay/adaptor/azure"
+ "github.com/Laisky/one-api/relay/channeltype"
+ "github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/relay/adaptor/azure"
- "github.com/songquanpeng/one-api/relay/channeltype"
- "github.com/songquanpeng/one-api/relay/relaymode"
"strings"
)
diff --git a/router/api.go b/router/api.go
index b9e5de38..7d84e301 100644
--- a/router/api.go
+++ b/router/api.go
@@ -1,9 +1,9 @@
package router
import (
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/controller/auth"
- "github.com/songquanpeng/one-api/middleware"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/controller/auth"
+ "github.com/Laisky/one-api/middleware"
"github.com/gin-contrib/gzip"
"github.com/gin-gonic/gin"
diff --git a/router/dashboard.go b/router/dashboard.go
index 5952d698..90b7d0bd 100644
--- a/router/dashboard.go
+++ b/router/dashboard.go
@@ -1,10 +1,10 @@
package router
import (
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/middleware"
"github.com/gin-contrib/gzip"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/middleware"
)
func SetDashboardRouter(router *gin.Engine) {
diff --git a/router/main.go b/router/main.go
index 39d8c04f..2f1353b4 100644
--- a/router/main.go
+++ b/router/main.go
@@ -3,9 +3,9 @@ package router
import (
"embed"
"fmt"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/common/logger"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/common/logger"
"net/http"
"os"
"strings"
diff --git a/router/relay.go b/router/relay.go
index 65072c86..34117693 100644
--- a/router/relay.go
+++ b/router/relay.go
@@ -1,8 +1,8 @@
package router
import (
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/middleware"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/middleware"
"github.com/gin-gonic/gin"
)
diff --git a/router/web.go b/router/web.go
index 3c9b4643..2a63bc06 100644
--- a/router/web.go
+++ b/router/web.go
@@ -3,13 +3,13 @@ package router
import (
"embed"
"fmt"
+ "github.com/Laisky/one-api/common"
+ "github.com/Laisky/one-api/common/config"
+ "github.com/Laisky/one-api/controller"
+ "github.com/Laisky/one-api/middleware"
"github.com/gin-contrib/gzip"
"github.com/gin-contrib/static"
"github.com/gin-gonic/gin"
- "github.com/songquanpeng/one-api/common"
- "github.com/songquanpeng/one-api/common/config"
- "github.com/songquanpeng/one-api/controller"
- "github.com/songquanpeng/one-api/middleware"
"net/http"
"strings"
)
diff --git a/web/README.md b/web/README.md
index 829271e2..926bdacc 100644
--- a/web/README.md
+++ b/web/README.md
@@ -22,7 +22,7 @@
默认主题,由 [JustSong](https://github.com/songquanpeng) 开发。
预览:
-|||
+|||
|:---:|:---:|
### 主题:berry
@@ -32,9 +32,9 @@
预览:
|||
|:---:|:---:|
-|||
-|||
-|||
+|||
+|||
+|||
### 主题:air
由 [Calon](https://github.com/Calcium-Ion) 开发。
@@ -44,4 +44,4 @@
#### 开发说明
-请查看 [web/berry/README.md](https://github.com/songquanpeng/one-api/tree/main/web/berry/README.md)
+请查看 [web/berry/README.md](https://github.com/Laisky/one-api/tree/main/web/berry/README.md)
diff --git a/web/air/src/components/Footer.js b/web/air/src/components/Footer.js
index 6fd0fa54..70d64095 100644
--- a/web/air/src/components/Footer.js
+++ b/web/air/src/components/Footer.js
@@ -38,7 +38,7 @@ const Footer = () => {
) : (
{systemName} {process.env.REACT_APP_VERSION}{' '}
diff --git a/web/air/src/components/OtherSetting.js b/web/air/src/components/OtherSetting.js
index ae924d9f..b1ddee5a 100644
--- a/web/air/src/components/OtherSetting.js
+++ b/web/air/src/components/OtherSetting.js
@@ -90,7 +90,7 @@ const OtherSetting = () => {
const openGitHubRelease = () => {
window.location =
- 'https://github.com/songquanpeng/one-api/releases/latest';
+ 'https://github.com/Laisky/one-api/releases/latest';
};
const checkUpdate = async () => {
@@ -141,7 +141,7 @@ const OtherSetting = () => {
主题名称( 当前可用主题)}
+ to='https://github.com/Laisky/one-api/blob/main/web/README.md'>当前可用主题)}
placeholder='请输入主题名称'
value={inputs.Theme}
name='Theme'
diff --git a/web/air/src/pages/About/index.js b/web/air/src/pages/About/index.js
index ec13f151..69a9c47c 100644
--- a/web/air/src/pages/About/index.js
+++ b/web/air/src/pages/About/index.js
@@ -37,8 +37,8 @@ const About = () => {
可在设置页面设置关于内容,支持 HTML & Markdown
项目仓库地址:
-
- https://github.com/songquanpeng/one-api
+
+ https://github.com/Laisky/one-api
> : <>
diff --git a/web/air/src/pages/Channel/EditChannel.js b/web/air/src/pages/Channel/EditChannel.js
index efb2cee8..e8a6b5e0 100644
--- a/web/air/src/pages/Channel/EditChannel.js
+++ b/web/air/src/pages/Channel/EditChannel.js
@@ -324,7 +324,7 @@ const EditChannel = (props) => {
注意,模型部署名称必须和模型名称保持一致 ,因为 One API 会把请求体中的
model
参数替换为你的部署名称(模型名称中的点会被剔除),图片演示 。
+ href='https://github.com/Laisky/one-api/issues/133?notification_referrer_id=NT_kwDOAmJSYrM2NjIwMzI3NDgyOjM5OTk4MDUw#issuecomment-1571602271'>图片演示 。
>
}>
diff --git a/web/air/src/pages/Home/index.js b/web/air/src/pages/Home/index.js
index 4803ba4e..1d660477 100644
--- a/web/air/src/pages/Home/index.js
+++ b/web/air/src/pages/Home/index.js
@@ -73,10 +73,10 @@ const Home = () => {
源码:
- https://github.com/songquanpeng/one-api
+ https://github.com/Laisky/one-api
启动时间:{getStartTimeString()}
@@ -127,4 +127,4 @@ const Home = () => {
);
};
-export default Home;
\ No newline at end of file
+export default Home;
diff --git a/web/berry/src/ui-component/Footer.js b/web/berry/src/ui-component/Footer.js
index 38f61993..b61447b4 100644
--- a/web/berry/src/ui-component/Footer.js
+++ b/web/berry/src/ui-component/Footer.js
@@ -15,7 +15,7 @@ const Footer = () => {
) : (
<>
-
+
{siteInfo.system_name} {process.env.REACT_APP_VERSION}{' '}
由{' '}
diff --git a/web/berry/src/views/About/index.js b/web/berry/src/views/About/index.js
index edbdaceb..fcfcb97f 100644
--- a/web/berry/src/views/About/index.js
+++ b/web/berry/src/views/About/index.js
@@ -41,7 +41,7 @@ const About = () => {
可在设置页面设置关于内容,支持 HTML & Markdown
项目仓库地址:
- https://github.com/songquanpeng/one-api
+ https://github.com/Laisky/one-api
diff --git a/web/berry/src/views/Home/baseIndex.js b/web/berry/src/views/Home/baseIndex.js
index c8f8ddef..68e35f04 100644
--- a/web/berry/src/views/Home/baseIndex.js
+++ b/web/berry/src/views/Home/baseIndex.js
@@ -26,7 +26,7 @@ const BaseIndex = () => (
}
- href="https://github.com/songquanpeng/one-api"
+ href="https://github.com/Laisky/one-api"
target="_blank"
sx={{ backgroundColor: '#24292e', color: '#fff', width: 'fit-content', boxShadow: '0 3px 5px 2px rgba(255, 105, 135, .3)' }}
>
diff --git a/web/berry/src/views/Setting/component/OtherSetting.js b/web/berry/src/views/Setting/component/OtherSetting.js
index 426b8c81..2c2ed0c8 100644
--- a/web/berry/src/views/Setting/component/OtherSetting.js
+++ b/web/berry/src/views/Setting/component/OtherSetting.js
@@ -106,7 +106,7 @@ const OtherSetting = () => {
};
const openGitHubRelease = () => {
- window.location = 'https://github.com/songquanpeng/one-api/releases/latest';
+ window.location = 'https://github.com/Laisky/one-api/releases/latest';
};
const checkUpdate = async () => {
diff --git a/web/default/src/components/Footer.js b/web/default/src/components/Footer.js
index c303e79b..4913e01c 100644
--- a/web/default/src/components/Footer.js
+++ b/web/default/src/components/Footer.js
@@ -38,7 +38,7 @@ const Footer = () => {
) : (
{systemName} {process.env.REACT_APP_VERSION}{' '}
diff --git a/web/default/src/components/OtherSetting.js b/web/default/src/components/OtherSetting.js
index ae924d9f..b1ddee5a 100644
--- a/web/default/src/components/OtherSetting.js
+++ b/web/default/src/components/OtherSetting.js
@@ -90,7 +90,7 @@ const OtherSetting = () => {
const openGitHubRelease = () => {
window.location =
- 'https://github.com/songquanpeng/one-api/releases/latest';
+ 'https://github.com/Laisky/one-api/releases/latest';
};
const checkUpdate = async () => {
@@ -141,7 +141,7 @@ const OtherSetting = () => {
主题名称( 当前可用主题)}
+ to='https://github.com/Laisky/one-api/blob/main/web/README.md'>当前可用主题)}
placeholder='请输入主题名称'
value={inputs.Theme}
name='Theme'
diff --git a/web/default/src/pages/About/index.js b/web/default/src/pages/About/index.js
index ec13f151..69a9c47c 100644
--- a/web/default/src/pages/About/index.js
+++ b/web/default/src/pages/About/index.js
@@ -37,8 +37,8 @@ const About = () => {
可在设置页面设置关于内容,支持 HTML & Markdown
项目仓库地址:
-
- https://github.com/songquanpeng/one-api
+
+ https://github.com/Laisky/one-api
> : <>
diff --git a/web/default/src/pages/Channel/EditChannel.js b/web/default/src/pages/Channel/EditChannel.js
index 2880ac98..bcf65ffa 100644
--- a/web/default/src/pages/Channel/EditChannel.js
+++ b/web/default/src/pages/Channel/EditChannel.js
@@ -54,6 +54,11 @@ const EditChannel = () => {
const [basicModels, setBasicModels] = useState([]);
const [fullModels, setFullModels] = useState([]);
const [customModel, setCustomModel] = useState('');
+ const [config, setConfig] = useState({
+ region: '',
+ sk: '',
+ ak: ''
+ });
const handleInputChange = (e, { name, value }) => {
setInputs((inputs) => ({ ...inputs, [name]: value }));
if (name === 'type') {
@@ -65,6 +70,10 @@ const EditChannel = () => {
}
};
+ const handleConfigChange = (e, { name, value }) => {
+ setConfig((inputs) => ({ ...inputs, [name]: value }));
+ };
+
const loadChannel = async () => {
let res = await API.get(`/api/channel/${channelId}`);
const { success, message, data } = res.data;
@@ -83,6 +92,9 @@ const EditChannel = () => {
data.model_mapping = JSON.stringify(JSON.parse(data.model_mapping), null, 2);
}
setInputs(data);
+ if (data.config !== '') {
+ setConfig(JSON.parse(data.config));
+ }
setBasicModels(getChannelModels(data.type));
} else {
showError(message);
@@ -176,6 +188,7 @@ const EditChannel = () => {
let res;
localInputs.models = localInputs.models.join(',');
localInputs.group = localInputs.groups.join(',');
+ localInputs.config = JSON.stringify(config);
if (isEdit) {
res = await API.put(`/api/channel/`, { ...localInputs, id: parseInt(channelId) });
} else {
@@ -234,7 +247,7 @@ const EditChannel = () => {
注意,模型部署名称必须和模型名称保持一致 ,因为 One API 会把请求体中的 model
参数替换为你的部署名称(模型名称中的点会被剔除),图片演示 。
+ href='https://github.com/Laisky/one-api/issues/133?notification_referrer_id=NT_kwDOAmJSYrM2NjIwMzI3NDgyOjM5OTk4MDUw#issuecomment-1571602271'>图片演示 。
{
fluid
multiple
search
- onLabelClick={(e, { value }) => {copy(value).then()}}
+ onLabelClick={(e, { value }) => {
+ copy(value).then();
+ }}
selection
onChange={handleInputChange}
value={inputs.models}
@@ -403,11 +418,11 @@ const EditChannel = () => {
{
name='ak'
required
placeholder={'AWS IAM Access Key'}
- onChange={handleInputChange}
- value={inputs.ak}
+ onChange={handleConfigChange}
+ value={config.ak}
autoComplete=''
/>
{
name='sk'
required
placeholder={'AWS IAM Secret Key'}
- onChange={handleInputChange}
- value={inputs.sk}
+ onChange={handleConfigChange}
+ value={config.sk}
autoComplete=''
/>
diff --git a/web/default/src/pages/Home/index.js b/web/default/src/pages/Home/index.js
index 63d6d77a..f0b5c5b7 100644
--- a/web/default/src/pages/Home/index.js
+++ b/web/default/src/pages/Home/index.js
@@ -69,10 +69,10 @@ const Home = () => {
源码:
- https://github.com/songquanpeng/one-api
+ https://github.com/Laisky/one-api
启动时间:{getStartTimeString()}