From 2ffa4268fc6e6e47c1f67d3404b1b07bbeccd2b5 Mon Sep 17 00:00:00 2001
From: HowieWood <98788152+utopeadia@users.noreply.github.com>
Date: Wed, 6 Nov 2024 01:21:02 +0000
Subject: [PATCH] Continue fixing Ollama embedding return issue

---
 relay/channel/ollama/dto.go          |  2 +-
 relay/channel/ollama/relay-ollama.go | 11 ++++++++++-
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/relay/channel/ollama/dto.go b/relay/channel/ollama/dto.go
index 690fc77..980992a 100644
--- a/relay/channel/ollama/dto.go
+++ b/relay/channel/ollama/dto.go
@@ -37,5 +37,5 @@ type OllamaEmbeddingRequest struct {
 type OllamaEmbeddingResponse struct {
 	Error     string    `json:"error,omitempty"`
 	Model     string    `json:"model"`
-	Embedding []float64 `json:"embeddings,omitempty"`
+	Embedding [][]float64 `json:"embeddings,omitempty"`
 }
diff --git a/relay/channel/ollama/relay-ollama.go b/relay/channel/ollama/relay-ollama.go
index b2d4630..2ef716b 100644
--- a/relay/channel/ollama/relay-ollama.go
+++ b/relay/channel/ollama/relay-ollama.go
@@ -73,9 +73,10 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
 	if ollamaEmbeddingResponse.Error != "" {
 		return service.OpenAIErrorWrapper(err, "ollama_error", resp.StatusCode), nil
 	}
+	flattenedEmbeddings := flattenEmbeddings(ollamaEmbeddingResponse.Embedding)
 	data := make([]dto.OpenAIEmbeddingResponseItem, 0, 1)
 	data = append(data, dto.OpenAIEmbeddingResponseItem{
-		Embedding: ollamaEmbeddingResponse.Embedding,
+		Embedding: flattenedEmbeddings,
 		Object:    "embedding",
 	})
 	usage := &dto.Usage{
@@ -120,3 +121,11 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
 	}
 	return nil, usage
 }
+
+// flattenEmbeddings concatenates the per-input embedding rows returned by
+// Ollama's /api/embed endpoint into a single []float64, matching the shape
+// the OpenAI-compatible response item expects. A non-nil empty slice is
+// returned for empty input so JSON encodes "[]" rather than "null".
+func flattenEmbeddings(embeddings [][]float64) []float64 {
+	flattened := []float64{}
+	for _, row := range embeddings {
+		flattened = append(flattened, row...)
+	}
+	return flattened
+}