Ollama Embeddings
If you're opening this Notebook on Colab, you will probably need to install LlamaIndex 🦙.
%pip install llama-index-embeddings-ollama
!pip install llama-index
from llama_index.embeddings.ollama import OllamaEmbedding

# Point the embedding model at a locally running Ollama server
ollama_embedding = OllamaEmbedding(
    model_name="llama2",
    base_url="http://localhost:11434",
    ollama_additional_kwargs={"mirostat": 0},
)

# Embed a batch of text passages
pass_embedding = ollama_embedding.get_text_embedding_batch(
    ["This is a passage!", "This is another passage"], show_progress=True
)
print(pass_embedding)

# Embed a single query string
query_embedding = ollama_embedding.get_query_embedding("Where is blue?")
print(query_embedding)
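
If you want these embeddings used throughout a LlamaIndex pipeline rather than called directly, one common pattern is to register the model as the global default. A minimal sketch, assuming llama-index core is installed and that the Settings singleton is used for configuration:

from llama_index.core import Settings
from llama_index.embeddings.ollama import OllamaEmbedding

# Assumed setup: reuse the same Ollama-backed embedding model defined above
Settings.embed_model = OllamaEmbedding(
    model_name="llama2",
    base_url="http://localhost:11434",
)

# Indexes built after this point will use the Ollama embeddings by default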