From 88cc88c5d068cfc3b1fe65e99ed67234b6feeae8 Mon Sep 17 00:00:00 2001
From: CalciumIon <1808837298@qq.com>
Date: Sat, 27 Jul 2024 17:51:05 +0800
Subject: [PATCH] feat: support ollama tools

---
 relay/channel/ollama/dto.go          | 23 ++++++++++++-----------
 relay/channel/ollama/relay-ollama.go | 20 ++++++++++++--------
 2 files changed, 24 insertions(+), 19 deletions(-)

diff --git a/relay/channel/ollama/dto.go b/relay/channel/ollama/dto.go
index a6d6238..4f99a24 100644
--- a/relay/channel/ollama/dto.go
+++ b/relay/channel/ollama/dto.go
@@ -3,14 +3,18 @@ package ollama
 import "one-api/dto"
 
 type OllamaRequest struct {
-	Model       string        `json:"model,omitempty"`
-	Messages    []dto.Message `json:"messages,omitempty"`
-	Stream      bool          `json:"stream,omitempty"`
-	Temperature float64       `json:"temperature,omitempty"`
-	Seed        float64       `json:"seed,omitempty"`
-	Topp        float64       `json:"top_p,omitempty"`
-	TopK        int           `json:"top_k,omitempty"`
-	Stop        any           `json:"stop,omitempty"`
+	Model            string              `json:"model,omitempty"`
+	Messages         []dto.Message       `json:"messages,omitempty"`
+	Stream           bool                `json:"stream,omitempty"`
+	Temperature      float64             `json:"temperature,omitempty"`
+	Seed             float64             `json:"seed,omitempty"`
+	Topp             float64             `json:"top_p,omitempty"`
+	TopK             int                 `json:"top_k,omitempty"`
+	Stop             any                 `json:"stop,omitempty"`
+	Tools            []dto.ToolCall      `json:"tools,omitempty"`
+	ResponseFormat   *dto.ResponseFormat `json:"response_format,omitempty"`
+	FrequencyPenalty float64             `json:"frequency_penalty,omitempty"`
+	PresencePenalty  float64             `json:"presence_penalty,omitempty"`
 }
 
 type OllamaEmbeddingRequest struct {
@@ -21,6 +25,3 @@ type OllamaEmbeddingRequest struct {
 type OllamaEmbeddingResponse struct {
 	Embedding []float64 `json:"embedding,omitempty"`
 }
-
-//type OllamaOptions struct {
-//}
diff --git a/relay/channel/ollama/relay-ollama.go b/relay/channel/ollama/relay-ollama.go
index f63fe57..6bf395a 100644
--- a/relay/channel/ollama/relay-ollama.go
+++ b/relay/channel/ollama/relay-ollama.go
@@ -28,14 +28,18 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
 		Stop, _ = request.Stop.([]string)
 	}
 	return &OllamaRequest{
-		Model:       request.Model,
-		Messages:    messages,
-		Stream:      request.Stream,
-		Temperature: request.Temperature,
-		Seed:        request.Seed,
-		Topp:        request.TopP,
-		TopK:        request.TopK,
-		Stop:        Stop,
+		Model:            request.Model,
+		Messages:         messages,
+		Stream:           request.Stream,
+		Temperature:      request.Temperature,
+		Seed:             request.Seed,
+		Topp:             request.TopP,
+		TopK:             request.TopK,
+		Stop:             Stop,
+		Tools:            request.Tools,
+		ResponseFormat:   request.ResponseFormat,
+		FrequencyPenalty: request.FrequencyPenalty,
+		PresencePenalty:  request.PresencePenalty,
 	}
 }
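
Note (not part of the patch): a minimal test sketch showing how the new pass-through fields could be verified. It assumes the patch compiles as written, so the dto field types are inferred from the assignments in the diff; the test name, the "llama3" literal, and the zero-value `dto.ToolCall`/`dto.ResponseFormat` placeholders are hypothetical.

```go
// relay/channel/ollama/relay-ollama_test.go (sketch, not included in this patch)
package ollama

import (
	"testing"

	"one-api/dto"
)

func TestRequestOpenAI2OllamaCopiesToolFields(t *testing.T) {
	req := dto.GeneralOpenAIRequest{
		Model:            "llama3",                // placeholder model name
		Tools:            []dto.ToolCall{{}},      // zero-value tool; real fields of dto.ToolCall are not shown in the diff
		ResponseFormat:   &dto.ResponseFormat{},   // zero-value response format for illustration
		FrequencyPenalty: 0.5,
		PresencePenalty:  0.2,
	}

	out := requestOpenAI2Ollama(req)

	// The new fields should be copied through to the Ollama request verbatim.
	if len(out.Tools) != 1 || out.ResponseFormat == nil {
		t.Fatalf("expected tools and response_format to be copied through")
	}
	if out.FrequencyPenalty != 0.5 || out.PresencePenalty != 0.2 {
		t.Fatalf("expected penalty fields to be copied through")
	}
}
```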