Installation
Install the required packages:
%pip install llama-index-llms-qianfan
Initialization
from llama_index.llms.qianfan import Qianfan
import asyncio

# Replace with the Access Key and Secret Key of your Baidu Cloud account.
access_key = "XXX"
secret_key = "XXX"

# The model served behind the endpoint; ERNIE-Speed-8K has an 8192-token context window.
model_name = "ERNIE-Speed-8K"
endpoint_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed"
context_window = 8192

llm = Qianfan(access_key, secret_key, model_name, endpoint_url, context_window)
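LlamaIndex LLMs generally also expose a complete method for single-prompt text completion; assuming the Qianfan integration follows this standard interface, a quick sanity check might look like:

response = llm.complete("Hello! Who are you?")
print(response.text)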
Synchronous Chat
Use the chat method to generate a chat response synchronously.
from llama_index.core.base.llms.types import ChatMessage
messages = [
    ChatMessage(role="user", content="Tell me a joke."),
]
chat_response = llm.chat(messages)
print(chat_response.message.content)
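The role values above are plain strings; LlamaIndex also defines a MessageRole enum in the same types module, so an equivalent, more typo-proof construction is:

from llama_index.core.base.llms.types import ChatMessage, MessageRole

messages = [
    ChatMessage(role=MessageRole.USER, content="Tell me a joke."),
]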
Synchronous Streaming Chat
Use the stream_chat method to generate a streaming chat response synchronously.
messages = [
    ChatMessage(role="system", content="You are a helpful assistant."),
    ChatMessage(role="user", content="Tell me a story."),
]

content = ""
for chat_response in llm.stream_chat(messages):
    content += chat_response.delta
    print(chat_response.delta, end="")
messages = [ ChatMessage(role="system", content="You are a helpful assistant."), ChatMessage(role="user", content="Tell me a story."), ] content = "" for chat_response in llm.stream_chat(messages): content += chat_response.delta print(chat_response.delta, end="")
Asynchronous Chat
Use the achat method to generate a chat response asynchronously.
async def async_chat():
    messages = [
        ChatMessage(role="user", content="Tell me an async joke."),
    ]
    chat_response = await llm.achat(messages)
    print(chat_response.message.content)

asyncio.run(async_chat())
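Note that asyncio.run raises a RuntimeError in environments that already run an event loop, such as Jupyter notebooks; there you can simply await async_chat() in a cell, or apply the optional nest_asyncio package first:

import nest_asyncio

nest_asyncio.apply()  # patch the running event loop to allow nested asyncio.run calls

asyncio.run(async_chat())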
Asynchronous Streaming Chat
Use the astream_chat method to generate a streaming chat response asynchronously.
async def async_stream_chat():
    messages = [
        ChatMessage(role="system", content="You are a helpful assistant."),
        ChatMessage(role="user", content="Tell me an async story."),
    ]
    content = ""
    response = await llm.astream_chat(messages)
    async for chat_response in response:
        content += chat_response.delta
        print(chat_response.delta, end="")

asyncio.run(async_stream_chat())
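A practical payoff of the async interface is issuing several requests concurrently. A minimal sketch using asyncio.gather (the prompts are illustrative):

async def concurrent_chats():
    prompts = ["Tell me a joke.", "Tell me a fun fact."]
    # Fire off all requests at once instead of awaiting them one by one.
    tasks = [llm.achat([ChatMessage(role="user", content=p)]) for p in prompts]
    responses = await asyncio.gather(*tasks)
    for response in responses:
        print(response.message.content)

asyncio.run(concurrent_chats())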