...

Source file src/gitlab.hexacode.org/go-libs/chromem-go/embed_compat.go

Documentation: gitlab.hexacode.org/go-libs/chromem-go

     1  package chromem
     2  
const (
	// baseURLMistral is the root of Mistral's OpenAI-compatible REST API.
	baseURLMistral = "https://api.mistral.ai/v1"
	// Currently there's only one. Let's turn this into a pseudo-enum as soon as there are more.
	embeddingModelMistral = "mistral-embed"
)
     8  
     9  // NewEmbeddingFuncMistral returns a function that creates embeddings for a text
    10  // using the Mistral API.
    11  func NewEmbeddingFuncMistral(apiKey string) EmbeddingFunc {
    12  	// Mistral embeddings are normalized, see section "Distance Measures" on
    13  	// https://docs.mistral.ai/guides/embeddings/.
    14  	normalized := true
    15  
    16  	// The Mistral API docs don't mention the `encoding_format` as optional,
    17  	// but it seems to be, just like OpenAI. So we reuse the OpenAI function.
    18  	return NewEmbeddingFuncOpenAICompat(baseURLMistral, apiKey, embeddingModelMistral, &normalized)
    19  }
    20  
// baseURLJina is the root of Jina's OpenAI-compatible REST API.
const baseURLJina = "https://api.jina.ai/v1"

// EmbeddingModelJina is a pseudo-enum of the Jina embedding models
// accepted by NewEmbeddingFuncJina.
type EmbeddingModelJina string

const (
	EmbeddingModelJina2BaseEN   EmbeddingModelJina = "jina-embeddings-v2-base-en"
	EmbeddingModelJina2BaseDE   EmbeddingModelJina = "jina-embeddings-v2-base-de"
	EmbeddingModelJina2BaseCode EmbeddingModelJina = "jina-embeddings-v2-base-code"
	EmbeddingModelJina2BaseZH   EmbeddingModelJina = "jina-embeddings-v2-base-zh"
)
    31  
    32  // NewEmbeddingFuncJina returns a function that creates embeddings for a text
    33  // using the Jina API.
    34  func NewEmbeddingFuncJina(apiKey string, model EmbeddingModelJina) EmbeddingFunc {
    35  	return NewEmbeddingFuncOpenAICompat(baseURLJina, apiKey, string(model), nil)
    36  }
    37  
// baseURLMixedbread is the root of the mixedbread.ai REST API.
const baseURLMixedbread = "https://api.mixedbread.ai"

// EmbeddingModelMixedbread is a pseudo-enum of the mixedbread.ai embedding
// models accepted by NewEmbeddingFuncMixedbread.
type EmbeddingModelMixedbread string

const (
	EmbeddingModelMixedbreadUAELargeV1          EmbeddingModelMixedbread = "UAE-Large-V1"
	EmbeddingModelMixedbreadBGELargeENV15       EmbeddingModelMixedbread = "bge-large-en-v1.5"
	EmbeddingModelMixedbreadGTELarge            EmbeddingModelMixedbread = "gte-large"
	EmbeddingModelMixedbreadE5LargeV2           EmbeddingModelMixedbread = "e5-large-v2"
	EmbeddingModelMixedbreadMultilingualE5Large EmbeddingModelMixedbread = "multilingual-e5-large"
	EmbeddingModelMixedbreadMultilingualE5Base  EmbeddingModelMixedbread = "multilingual-e5-base"
	EmbeddingModelMixedbreadAllMiniLML6V2       EmbeddingModelMixedbread = "all-MiniLM-L6-v2"
	EmbeddingModelMixedbreadGTELargeZh          EmbeddingModelMixedbread = "gte-large-zh"
)
    52  
    53  // NewEmbeddingFuncMixedbread returns a function that creates embeddings for a text
    54  // using the mixedbread.ai API.
    55  func NewEmbeddingFuncMixedbread(apiKey string, model EmbeddingModelMixedbread) EmbeddingFunc {
    56  	return NewEmbeddingFuncOpenAICompat(baseURLMixedbread, apiKey, string(model), nil)
    57  }
    58  
// baseURLLocalAI is the default address of a locally running LocalAI instance.
const baseURLLocalAI = "http://localhost:8080/v1"
    60  
    61  // NewEmbeddingFuncLocalAI returns a function that creates embeddings for a text
    62  // using the LocalAI API.
    63  // You can start a LocalAI instance like this:
    64  //
    65  //	docker run -it -p 127.0.0.1:8080:8080 localai/localai:v2.7.0-ffmpeg-core bert-cpp
    66  //
    67  // And then call this constructor with model "bert-cpp-minilm-v6".
    68  // But other embedding models are supported as well. See the LocalAI documentation
    69  // for details.
    70  func NewEmbeddingFuncLocalAI(model string) EmbeddingFunc {
    71  	return NewEmbeddingFuncOpenAICompat(baseURLLocalAI, "", model, nil)
    72  }
    73  
const (
	// azureDefaultAPIVersion is used by NewEmbeddingFuncAzureOpenAI when the
	// caller passes an empty apiVersion.
	azureDefaultAPIVersion = "2024-02-01"
)
    77  
    78  // NewEmbeddingFuncAzureOpenAI returns a function that creates embeddings for a text
    79  // using the Azure OpenAI API.
    80  // The `deploymentURL` is the URL of the deployed model, e.g. "https://YOUR_RESOURCE_NAME.openai.azure.com/openai/deployments/YOUR_DEPLOYMENT_NAME"
    81  // See https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/embeddings?tabs=console#how-to-get-embeddings
    82  func NewEmbeddingFuncAzureOpenAI(apiKey string, deploymentURL string, apiVersion string, model string) EmbeddingFunc {
    83  	if apiVersion == "" {
    84  		apiVersion = azureDefaultAPIVersion
    85  	}
    86  	return newEmbeddingFuncOpenAICompat(deploymentURL, apiKey, model, nil, map[string]string{"api-key": apiKey}, map[string]string{"api-version": apiVersion})
    87  }
    88  

View as plain text