Merge commit '2369025842b828ac38f4427fd1ebab8d03b1fe7f'

Laisky.Cai committed 2024-04-20 01:07:29 +00:00
139 changed files with 2642 additions and 2625 deletions

View File

@@ -3,13 +3,13 @@ package openai
 import (
     "fmt"
     "github.com/Laisky/errors/v2"
+    "github.com/Laisky/one-api/relay/adaptor"
+    "github.com/Laisky/one-api/relay/adaptor/minimax"
+    "github.com/Laisky/one-api/relay/channeltype"
+    "github.com/Laisky/one-api/relay/meta"
+    "github.com/Laisky/one-api/relay/model"
+    "github.com/Laisky/one-api/relay/relaymode"
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/relay/adaptor"
-    "github.com/songquanpeng/one-api/relay/adaptor/minimax"
-    "github.com/songquanpeng/one-api/relay/channeltype"
-    "github.com/songquanpeng/one-api/relay/meta"
-    "github.com/songquanpeng/one-api/relay/model"
-    "github.com/songquanpeng/one-api/relay/relaymode"
     "io"
     "net/http"
     "strings"
@@ -39,7 +39,7 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
     task := strings.TrimPrefix(requestURL, "/v1/")
     model_ := meta.ActualModelName
     model_ = strings.Replace(model_, ".", "", -1)
-    //https://github.com/songquanpeng/one-api/issues/1191
+    //https://github.com/Laisky/one-api/issues/1191
     // {your endpoint}/openai/deployments/{your azure_model}/chat/completions?api-version={api_version}
     requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
     return GetFullRequestURL(meta.BaseURL, requestURL, meta.ChannelType), nil
@@ -58,7 +58,7 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
     }
     req.Header.Set("Authorization", "Bearer "+meta.APIKey)
     if meta.ChannelType == channeltype.OpenRouter {
-        req.Header.Set("HTTP-Referer", "https://github.com/songquanpeng/one-api")
+        req.Header.Set("HTTP-Referer", "https://github.com/Laisky/one-api")
         req.Header.Set("X-Title", "One API")
     }
     return nil
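
For context, the unchanged lines in this hunk rewrite an OpenAI-style request path into an Azure deployment path: trim the "/v1/" prefix, drop dots from the model name, and join "/openai/deployments/{model}/{task}". A minimal standalone sketch of that rewrite (buildAzureURL and the sample endpoint are illustrative names, not taken from the repository):

package main

import (
    "fmt"
    "strings"
)

// buildAzureURL mirrors the URL rewriting shown above: it trims the
// "/v1/" prefix, removes dots from the model name ("gpt-3.5-turbo"
// becomes "gpt-35-turbo"), and joins the Azure deployments path.
func buildAzureURL(baseURL, requestURL, modelName string) string {
    task := strings.TrimPrefix(requestURL, "/v1/")
    model_ := strings.Replace(modelName, ".", "", -1)
    return baseURL + fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
}

func main() {
    fmt.Println(buildAzureURL(
        "https://example.openai.azure.com",
        "/v1/chat/completions?api-version=2024-02-01",
        "gpt-3.5-turbo",
    ))
    // https://example.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions?api-version=2024-02-01
}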

View File

@@ -1,15 +1,15 @@
 package openai
 import (
-    "github.com/songquanpeng/one-api/relay/adaptor/ai360"
-    "github.com/songquanpeng/one-api/relay/adaptor/baichuan"
-    "github.com/songquanpeng/one-api/relay/adaptor/groq"
-    "github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
-    "github.com/songquanpeng/one-api/relay/adaptor/minimax"
-    "github.com/songquanpeng/one-api/relay/adaptor/mistral"
-    "github.com/songquanpeng/one-api/relay/adaptor/moonshot"
-    "github.com/songquanpeng/one-api/relay/adaptor/stepfun"
-    "github.com/songquanpeng/one-api/relay/channeltype"
+    "github.com/Laisky/one-api/relay/adaptor/ai360"
+    "github.com/Laisky/one-api/relay/adaptor/baichuan"
+    "github.com/Laisky/one-api/relay/adaptor/groq"
+    "github.com/Laisky/one-api/relay/adaptor/lingyiwanwu"
+    "github.com/Laisky/one-api/relay/adaptor/minimax"
+    "github.com/Laisky/one-api/relay/adaptor/mistral"
+    "github.com/Laisky/one-api/relay/adaptor/moonshot"
+    "github.com/Laisky/one-api/relay/adaptor/stepfun"
+    "github.com/Laisky/one-api/relay/channeltype"
 )
 var CompatibleChannels = []int{

View File

@@ -2,8 +2,8 @@ package openai
 import (
     "fmt"
-    "github.com/songquanpeng/one-api/relay/channeltype"
-    "github.com/songquanpeng/one-api/relay/model"
+    "github.com/Laisky/one-api/relay/channeltype"
+    "github.com/Laisky/one-api/relay/model"
     "strings"
 )

View File

@@ -3,8 +3,8 @@ package openai
 import (
     "bytes"
     "encoding/json"
+    "github.com/Laisky/one-api/relay/model"
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/relay/model"
     "io"
     "net/http"
 )

View File

@@ -4,15 +4,16 @@ import (
"bufio"
"bytes"
"encoding/json"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/conv"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/model"
"github.com/songquanpeng/one-api/relay/relaymode"
"io"
"net/http"
"strings"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/conv"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/relay/model"
"github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
)
func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
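
The imports regrouped here (bufio, encoding/json, strings, plus the fork's conv and logger packages) serve the SSE streaming path that StreamHandler implements. As a rough, self-contained illustration of that kind of handler, and not the project's actual implementation, the sketch below scans "data: " lines, stops at "[DONE]", and concatenates the streamed deltas:

package main

import (
    "bufio"
    "encoding/json"
    "fmt"
    "strings"
)

// chunk mirrors the minimal shape of a streamed chat-completion delta.
type chunk struct {
    Choices []struct {
        Delta struct {
            Content string `json:"content"`
        } `json:"delta"`
    } `json:"choices"`
}

// collectStream is an illustrative stand-in for a stream handler: it scans
// SSE lines, skips anything that is not a data event, stops at [DONE], and
// concatenates the delta contents so usage could be recomputed afterwards.
func collectStream(sc *bufio.Scanner) (string, error) {
    var sb strings.Builder
    for sc.Scan() {
        line := strings.TrimSpace(sc.Text())
        if !strings.HasPrefix(line, "data: ") {
            continue
        }
        payload := strings.TrimPrefix(line, "data: ")
        if payload == "[DONE]" {
            break
        }
        var c chunk
        if err := json.Unmarshal([]byte(payload), &c); err != nil {
            return "", err
        }
        for _, choice := range c.Choices {
            sb.WriteString(choice.Delta.Content)
        }
    }
    return sb.String(), sc.Err()
}

func main() {
    sse := "data: {\"choices\":[{\"delta\":{\"content\":\"Hel\"}}]}\n" +
        "data: {\"choices\":[{\"delta\":{\"content\":\"lo\"}}]}\n" +
        "data: [DONE]\n"
    out, _ := collectStream(bufio.NewScanner(strings.NewReader(sse)))
    fmt.Println(out) // Hello
}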

View File

@@ -1,6 +1,6 @@
 package openai
-import "github.com/songquanpeng/one-api/relay/model"
+import "github.com/Laisky/one-api/relay/model"
 type TextContent struct {
     Type string `json:"type,omitempty"`
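
TextContent follows the OpenAI "content parts" shape, where each part carries a "type" discriminator. A self-contained sketch of how such a part marshals; the Text field and the sample values are assumptions, since only the Type field appears in this hunk:

package main

import (
    "encoding/json"
    "fmt"
)

// TextContent mirrors the struct in the hunk above; the Text field is an
// assumed companion based on the OpenAI content-part format.
type TextContent struct {
    Type string `json:"type,omitempty"`
    Text string `json:"text,omitempty"`
}

func main() {
    b, _ := json.Marshal(TextContent{Type: "text", Text: "hello"})
    fmt.Println(string(b)) // {"type":"text","text":"hello"}
}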

View File

@@ -3,12 +3,12 @@ package openai
 import (
     "fmt"
     "github.com/Laisky/errors/v2"
+    "github.com/Laisky/one-api/common/config"
+    "github.com/Laisky/one-api/common/image"
+    "github.com/Laisky/one-api/common/logger"
+    billingratio "github.com/Laisky/one-api/relay/billing/ratio"
+    "github.com/Laisky/one-api/relay/model"
     "github.com/pkoukk/tiktoken-go"
-    "github.com/songquanpeng/one-api/common/config"
-    "github.com/songquanpeng/one-api/common/image"
-    "github.com/songquanpeng/one-api/common/logger"
-    billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
-    "github.com/songquanpeng/one-api/relay/model"
     "math"
     "strings"
 )
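
This file pairs github.com/pkoukk/tiktoken-go with the billing ratio package, the usual combination for prompt token counting. A minimal sketch of counting tokens with that library (countTokens is an illustrative name; the repository's own fallback logic for unknown models is omitted):

package main

import (
    "fmt"

    "github.com/pkoukk/tiktoken-go"
)

// countTokens resolves the encoding for a model name and returns how many
// tokens the given text occupies under that encoding.
func countTokens(model, text string) (int, error) {
    enc, err := tiktoken.EncodingForModel(model)
    if err != nil {
        return 0, err
    }
    return len(enc.Encode(text, nil, nil)), nil
}

func main() {
    n, err := countTokens("gpt-3.5-turbo", "Hello, world")
    if err != nil {
        panic(err)
    }
    fmt.Println(n)
}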

View File

@@ -1,6 +1,6 @@
 package openai
-import "github.com/songquanpeng/one-api/relay/model"
+import "github.com/Laisky/one-api/relay/model"
 func ErrorWrapper(err error, code string, statusCode int) *model.ErrorWithStatusCode {
     Error := model.Error{
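
The hunk ends mid-function. A hedged, self-contained sketch of how a wrapper with this signature typically completes; the Error and ErrorWithStatusCode field sets below are assumptions modeled on the OpenAI error object, not quoted from the repository:

package main

import (
    "errors"
    "fmt"
)

// Minimal stand-ins for the repository's model types; the real field set
// is an assumption based on the OpenAI error shape.
type Error struct {
    Message string `json:"message"`
    Type    string `json:"type"`
    Code    any    `json:"code"`
}

type ErrorWithStatusCode struct {
    Error      Error `json:"error"`
    StatusCode int   `json:"-"`
}

// errorWrapper mirrors the signature shown in the hunk above: it packs the
// error message and code into an OpenAI-style error plus an HTTP status.
func errorWrapper(err error, code string, statusCode int) *ErrorWithStatusCode {
    return &ErrorWithStatusCode{
        Error: Error{
            Message: err.Error(),
            Type:    "one_api_error",
            Code:    code,
        },
        StatusCode: statusCode,
    }
}

func main() {
    e := errorWrapper(errors.New("upstream timeout"), "do_request_failed", 502)
    fmt.Println(e.StatusCode, e.Error.Message) // 502 upstream timeout
}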