mirror of
https://github.com/linux-do/new-api.git
synced 2025-09-17 16:06:38 +08:00
feat: support ollama tools
This commit is contained in:
parent
ab1d61d910
commit
88cc88c5d0
@ -3,14 +3,18 @@ package ollama
|
|||||||
import "one-api/dto"
|
import "one-api/dto"
|
||||||
|
|
||||||
type OllamaRequest struct {
|
type OllamaRequest struct {
|
||||||
Model string `json:"model,omitempty"`
|
Model string `json:"model,omitempty"`
|
||||||
Messages []dto.Message `json:"messages,omitempty"`
|
Messages []dto.Message `json:"messages,omitempty"`
|
||||||
Stream bool `json:"stream,omitempty"`
|
Stream bool `json:"stream,omitempty"`
|
||||||
Temperature float64 `json:"temperature,omitempty"`
|
Temperature float64 `json:"temperature,omitempty"`
|
||||||
Seed float64 `json:"seed,omitempty"`
|
Seed float64 `json:"seed,omitempty"`
|
||||||
Topp float64 `json:"top_p,omitempty"`
|
Topp float64 `json:"top_p,omitempty"`
|
||||||
TopK int `json:"top_k,omitempty"`
|
TopK int `json:"top_k,omitempty"`
|
||||||
Stop any `json:"stop,omitempty"`
|
Stop any `json:"stop,omitempty"`
|
||||||
|
Tools []dto.ToolCall `json:"tools,omitempty"`
|
||||||
|
ResponseFormat *dto.ResponseFormat `json:"response_format,omitempty"`
|
||||||
|
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
|
||||||
|
PresencePenalty float64 `json:"presence_penalty,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type OllamaEmbeddingRequest struct {
|
type OllamaEmbeddingRequest struct {
|
||||||
@ -21,6 +25,3 @@ type OllamaEmbeddingRequest struct {
|
|||||||
// OllamaEmbeddingResponse is the body returned by Ollama's embedding
// endpoint; Embedding carries the vector computed for the input text.
type OllamaEmbeddingResponse struct {
	Embedding []float64 `json:"embedding,omitempty"`
}
|
||||||
|
|
||||||
//type OllamaOptions struct {
|
|
||||||
//}
|
|
||||||
|
@ -28,14 +28,18 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
|
|||||||
Stop, _ = request.Stop.([]string)
|
Stop, _ = request.Stop.([]string)
|
||||||
}
|
}
|
||||||
return &OllamaRequest{
|
return &OllamaRequest{
|
||||||
Model: request.Model,
|
Model: request.Model,
|
||||||
Messages: messages,
|
Messages: messages,
|
||||||
Stream: request.Stream,
|
Stream: request.Stream,
|
||||||
Temperature: request.Temperature,
|
Temperature: request.Temperature,
|
||||||
Seed: request.Seed,
|
Seed: request.Seed,
|
||||||
Topp: request.TopP,
|
Topp: request.TopP,
|
||||||
TopK: request.TopK,
|
TopK: request.TopK,
|
||||||
Stop: Stop,
|
Stop: Stop,
|
||||||
|
Tools: request.Tools,
|
||||||
|
ResponseFormat: request.ResponseFormat,
|
||||||
|
FrequencyPenalty: request.FrequencyPenalty,
|
||||||
|
PresencePenalty: request.PresencePenalty,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user