%pip install llama-index-storage-docstore-firestore
%pip install llama-index-storage-kvstore-firestore
%pip install llama-index-storage-index-store-firestore
%pip install llama-index-llms-openai
!pip install llama-index
import nest_asyncio
nest_asyncio.apply()
import logging
import sys
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
from llama_index.core import SimpleDirectoryReader, StorageContext
from llama_index.core import VectorStoreIndex, SimpleKeywordTableIndex
from llama_index.core import SummaryIndex
from llama_index.core import ComposableGraph
from llama_index.llms.openai import OpenAI
from llama_index.core.response.notebook_utils import display_response
from llama_index.core import Settings
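The OpenAI LLM used later (and the default embedding model behind VectorStoreIndex) reads its API key from the environment. A minimal sketch, assuming you authenticate via the standard OPENAI_API_KEY environment variable (the key value below is a placeholder):

import os

# Placeholder key: replace with your own, or export OPENAI_API_KEY before launching.
os.environ.setdefault("OPENAI_API_KEY", "sk-...")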
Download Data
!mkdir -p 'data/paul_graham/'
!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'
Load Documents
reader = SimpleDirectoryReader("./data/paul_graham/")
documents = reader.load_data()
Parse into Nodes
from llama_index.core.node_parser import SentenceSplitter
nodes = SentenceSplitter().get_nodes_from_documents(documents)
Add to Docstore
from llama_index.storage.kvstore.firestore import FirestoreKVStore
from llama_index.storage.docstore.firestore import FirestoreDocumentStore
from llama_index.storage.index_store.firestore import FirestoreIndexStore
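FirestoreKVStore is built on the google-cloud-firestore client, which resolves credentials through Google Application Default Credentials. A minimal sketch, assuming you authenticate with a service-account key file (the file path and project ID below are placeholders):

import os

# Placeholder credentials setup: point ADC at a service-account key file
# and pin the GCP project that hosts the Firestore database.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/path/to/service-account.json"
os.environ["GOOGLE_CLOUD_PROJECT"] = "your-gcp-project-id"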
kvstore = FirestoreKVStore()
storage_context = StorageContext.from_defaults(
docstore=FirestoreDocumentStore(kvstore),
index_store=FirestoreIndexStore(kvstore),
)
storage_context.docstore.add_documents(nodes)
Define Multiple Indexes
Each index uses the same underlying nodes.
summary_index = SummaryIndex(nodes, storage_context=storage_context)
vector_index = VectorStoreIndex(nodes, storage_context=storage_context)
keyword_table_index = SimpleKeywordTableIndex(
nodes, storage_context=storage_context
)
# NOTE: the docstore still has the same nodes
len(storage_context.docstore.docs)
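Since docs is a mapping from node ID to node, you can peek at one entry to confirm that all three indexes share the same underlying content; a small sketch:

# Inspect one stored node from the shared docstore.
node_id, node = next(iter(storage_context.docstore.docs.items()))
print(node_id, node.get_content()[:100])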
Test out saving and loading
# NOTE: docstore and index_store are persisted to Firestore by default
# NOTE: here we only need to persist the simple vector store to disk
storage_context.persist()
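The docstore and index store already live in Firestore, so persist() only has to write the remaining in-memory components (here, the default simple vector store) to disk; without arguments they go to ./storage. A sketch, assuming you want to pick the directory explicitly (the path is arbitrary):

# Write the in-memory stores (e.g. the simple vector store) to a custom directory.
storage_context.persist(persist_dir="./firestore_demo_storage")

If you use a custom directory, pass the same persist_dir when re-creating the StorageContext below.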
# note down index IDs
list_id = summary_index.index_id
vector_id = vector_index.index_id
keyword_id = keyword_table_index.index_id
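An alternative to noting down the auto-generated IDs is to assign stable, human-readable IDs before persisting, using set_index_id; if you go this route, reload by these names instead of the recorded variables. A sketch:

# Optional alternative: pin human-readable index IDs (run instead of the cell above).
summary_index.set_index_id("summary_index")
vector_index.set_index_id("vector_index")
keyword_table_index.set_index_id("keyword_table_index")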
from llama_index.core import load_index_from_storage
kvstore = FirestoreKVStore()
# re-create storage context
storage_context = StorageContext.from_defaults(
docstore=FirestoreDocumentStore(kvstore),
index_store=FirestoreIndexStore(kvstore),
)
# load indices
summary_index = load_index_from_storage(
storage_context=storage_context, index_id=list_id
)
vector_index = load_index_from_storage(
storage_context=storage_context, index_id=vector_id
)
keyword_table_index = load_index_from_storage(
storage_context=storage_context, index_id=keyword_id
)
Test out some Queries
chatgpt = OpenAI(temperature=0, model="gpt-3.5-turbo")
Settings.llm = chatgpt
Settings.chunk_size = 1024
query_engine = summary_index.as_query_engine()
list_response = query_engine.query("What is a summary of this document?")
display_response(list_response)
query_engine = vector_index.as_query_engine()
vector_response = query_engine.query("What did the author do growing up?")
display_response(vector_response)
query_engine = keyword_table_index.as_query_engine()
keyword_response = query_engine.query(
"What did the author do after his time at YC?"
)
display_response(keyword_response)