Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-10-28 20:33:42 +08:00)

Compare commits: v0.5.2-alp...v0.5.2 (3 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | b464e2907a |  |
|  | d96cf2e84d |  |
|  | 446337c329 |  |
```diff
@@ -92,7 +92,7 @@ func openaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*O
 	return nil, responseText
 }
 
-func openaiHandler(c *gin.Context, resp *http.Response, consumeQuota bool) (*OpenAIErrorWithStatusCode, *Usage) {
+func openaiHandler(c *gin.Context, resp *http.Response, consumeQuota bool, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) {
 	var textResponse TextResponse
 	if consumeQuota {
 		responseBody, err := io.ReadAll(resp.Body)
```
```diff
@@ -132,5 +132,17 @@ func openaiHandler(c *gin.Context, resp *http.Response, consumeQuota bool) (*Ope
 	if err != nil {
 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 	}
+
+	if textResponse.Usage.TotalTokens == 0 {
+		completionTokens := 0
+		for _, choice := range textResponse.Choices {
+			completionTokens += countTokenText(choice.Message.Content, model)
+		}
+		textResponse.Usage = Usage{
+			PromptTokens:     promptTokens,
+			CompletionTokens: completionTokens,
+			TotalTokens:      promptTokens + completionTokens,
+		}
+	}
 	return nil, &textResponse.Usage
 }
```
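The two hunks above make openaiHandler fall back to counting tokens itself when the upstream response reports no usage. Below is a minimal standalone sketch of that fallback; the repo's Usage type and countTokenText helper are replaced by simplified stand-ins so it runs on its own, and the function name fallbackUsage is hypothetical, chosen only for this illustration.

```go
package main

import (
	"fmt"
	"strings"
)

// Simplified stand-ins for one-api's Usage type and countTokenText helper;
// the real counter is tokenizer-based, this one just splits on whitespace.
type Usage struct {
	PromptTokens     int
	CompletionTokens int
	TotalTokens      int
}

func countTokenText(text string, model string) int {
	return len(strings.Fields(text))
}

// fallbackUsage mirrors the logic added in the hunk above: sum the completion
// tokens over all returned choices and combine them with the prompt tokens
// that were counted before the request was sent.
func fallbackUsage(promptTokens int, completions []string, model string) Usage {
	completionTokens := 0
	for _, text := range completions {
		completionTokens += countTokenText(text, model)
	}
	return Usage{
		PromptTokens:     promptTokens,
		CompletionTokens: completionTokens,
		TotalTokens:      promptTokens + completionTokens,
	}
}

func main() {
	u := fallbackUsage(12, []string{"Hello there, how can I help you today?"}, "gpt-3.5-turbo")
	fmt.Printf("%+v\n", u) // {PromptTokens:12 CompletionTokens:8 TotalTokens:20}
}
```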
```diff
@@ -302,7 +302,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		if err != nil {
 			return errorWrapper(err, "close_request_body_failed", http.StatusInternalServerError)
 		}
-		isStream = strings.HasPrefix(resp.Header.Get("Content-Type"), "text/event-stream")
+		isStream = isStream || strings.HasPrefix(resp.Header.Get("Content-Type"), "text/event-stream")
 	}
 
 	var textResponse TextResponse
```
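The isStream change above stops the response-header check from overwriting a stream flag that relayTextHelper appears to set earlier from the request itself. A minimal sketch of the corrected detection, assuming the first argument corresponds to that request-side flag (detectStream is a hypothetical name for this illustration):

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

// detectStream reproduces the corrected expression from the hunk above:
// a request that already asked for streaming stays a stream even when the
// upstream response does not advertise "text/event-stream"; otherwise the
// response Content-Type can still upgrade it.
func detectStream(requestedStream bool, header http.Header) bool {
	return requestedStream || strings.HasPrefix(header.Get("Content-Type"), "text/event-stream")
}

func main() {
	h := http.Header{}
	h.Set("Content-Type", "application/json")
	fmt.Println(detectStream(true, h))  // true: the request-side flag survives
	fmt.Println(detectStream(false, h)) // false

	h.Set("Content-Type", "text/event-stream; charset=utf-8")
	fmt.Println(detectStream(false, h)) // true: upgraded by the response header
}
```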
```diff
@@ -362,7 +362,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			textResponse.Usage.CompletionTokens = countTokenText(responseText, textRequest.Model)
 			return nil
 		} else {
-			err, usage := openaiHandler(c, resp, consumeQuota)
+			err, usage := openaiHandler(c, resp, consumeQuota, promptTokens, textRequest.Model)
 			if err != nil {
 				return err
 			}
```
```diff
@@ -81,8 +81,9 @@ type OpenAIErrorWithStatusCode struct {
 }
 
 type TextResponse struct {
-	Usage `json:"usage"`
-	Error OpenAIError `json:"error"`
+	Choices []OpenAITextResponseChoice `json:"choices"`
+	Usage `json:"usage"`
+	Error OpenAIError `json:"error"`
 }
 
 type OpenAITextResponseChoice struct {
```
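The struct change above adds Choices to TextResponse, which is what the new fallback in openaiHandler iterates over. The sketch below decodes a trimmed version of that struct (the Error field is omitted, and the nested field and tag names are assumptions modeled on the OpenAI chat completion response, not copied from the repo) to show how a payload without a usage block leaves TotalTokens at zero, the exact condition the fallback checks:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Assumed stand-ins modeled on the OpenAI chat completion response; only the
// Choices/Usage layout of TextResponse comes from the diff above.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type Choice struct {
	Message Message `json:"message"`
}

type TextResponse struct {
	Choices []Choice `json:"choices"`
	Usage   `json:"usage"` // embedded with a tag, so it maps to the "usage" object
}

func main() {
	// No "usage" object in the payload: TotalTokens stays 0, which is the
	// condition that now triggers local token counting in openaiHandler.
	body := `{"choices":[{"message":{"role":"assistant","content":"Hi!"}}]}`
	var r TextResponse
	if err := json.Unmarshal([]byte(body), &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Usage.TotalTokens, r.Choices[0].Message.Content) // 0 Hi!
}
```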
```diff
@@ -96,7 +96,7 @@ const TokensTable = () => {
         url = `opencat://team/join?domain=${encodedServerAddress}&token=sk-${key}`;
         break;
       case 'next':
-        url = `https://chatgpt1.nextweb.fun/#/?settings=%7B%22key%22:%22sk-${key}%22,%22url%22:%22${serverAddress}%22%7D`;
+        url = `https://chat.oneapi.pro/#/?settings=%7B%22key%22:%22sk-${key}%22,%22url%22:%22${serverAddress}%22%7D`;
         break;
       default:
         url = `sk-${key}`;
```