DeepInfra
%pip install llama-index-llms-deepinfra
Initialization
Set up the DeepInfraLLM class with your API key and desired parameters.
from llama_index.llms.deepinfra import DeepInfraLLM
import asyncio
llm = DeepInfraLLM(
    model="mistralai/Mixtral-8x22B-Instruct-v0.1",  # Default model name
    api_key="your-deepinfra-api-key",  # Replace with your DeepInfra API key
    temperature=0.5,
    max_tokens=50,
    additional_kwargs={"top_p": 0.9},
)
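If you prefer not to hard-code the key, you can export it before constructing the client. A minimal sketch, assuming the integration falls back to the DEEPINFRA_API_TOKEN environment variable when api_key is omitted (worth verifying against the release you have installed):

import os

# Assumption: DeepInfraLLM reads DEEPINFRA_API_TOKEN when no api_key is passed.
os.environ["DEEPINFRA_API_TOKEN"] = "your-deepinfra-api-key"

llm = DeepInfraLLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1")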
Synchronous Completion
Generate a text completion synchronously with the complete method.
response = llm.complete("Hello World!")
print(response.text)
Synchronous Streaming Completion
Generate a streaming text completion synchronously with the stream_complete method.
content = ""
for completion in llm.stream_complete("Once upon a time"):
content += completion.delta
print(completion.delta, end="")
content = "" for completion in llm.stream_complete("Once upon a time"): content += completion.delta print(completion.delta, end="")
Synchronous Chat
Generate a chat response synchronously with the chat method.
from llama_index.core.base.llms.types import ChatMessage
messages = [
    ChatMessage(role="user", content="Tell me a joke."),
]
chat_response = llm.chat(messages)
print(chat_response.message.content)
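Because chat takes the full message history, you can keep a conversation going by appending the assistant's reply and a new user turn before calling it again. A small sketch building on the cell above (the follow-up prompt is just an example):

# Append the assistant's reply, then ask a follow-up question.
messages.append(chat_response.message)
messages.append(ChatMessage(role="user", content="Explain that joke."))

follow_up = llm.chat(messages)
print(follow_up.message.content)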
Synchronous Streaming Chat
Generate a streaming chat response synchronously with the stream_chat method.
messages = [
    ChatMessage(role="system", content="You are a helpful assistant."),
    ChatMessage(role="user", content="Tell me a story."),
]
content = ""
for chat_response in llm.stream_chat(messages):
    content += chat_response.delta  # delta holds only the newest chunk
    print(chat_response.delta, end="")
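Streaming chat follows the same convention as streaming completion: each chat_response carries the newest chunk in delta, and chat_response.message.content typically holds the reply accumulated so far, so reading message.content on the final chunk yields the same string as the manual accumulator.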
Asynchronous Completion
Generate a text completion asynchronously with the acomplete method.
async def async_complete():
    response = await llm.acomplete("Hello Async World!")
    print(response.text)

asyncio.run(async_complete())
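The async client pays off when several requests can run concurrently. A minimal sketch using asyncio.gather (the prompts are placeholders):

async def batch_complete():
    prompts = ["Hello!", "Bonjour!", "Hola!"]
    # Issue all requests concurrently instead of awaiting them one by one.
    responses = await asyncio.gather(*(llm.acomplete(p) for p in prompts))
    for r in responses:
        print(r.text)

asyncio.run(batch_complete())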
Asynchronous Streaming Completion
Generate a streaming text completion asynchronously with the astream_complete method.
async def async_stream_complete():
    content = ""
    response = await llm.astream_complete("Once upon an async time")
    async for completion in response:
        content += completion.delta
        print(completion.delta, end="")

asyncio.run(async_stream_complete())
Asynchronous Chat
Generate a chat response asynchronously with the achat method.
async def async_chat():
    messages = [
        ChatMessage(role="user", content="Tell me an async joke."),
    ]
    chat_response = await llm.achat(messages)
    print(chat_response.message.content)

asyncio.run(async_chat())
Asynchronous Streaming Chat
Generate a streaming chat response asynchronously with the astream_chat method.
async def async_stream_chat():
    messages = [
        ChatMessage(role="system", content="You are a helpful assistant."),
        ChatMessage(role="user", content="Tell me an async story."),
    ]
    content = ""
    response = await llm.astream_chat(messages)
    async for chat_response in response:
        content += chat_response.delta  # delta holds only the newest chunk
        print(chat_response.delta, end="")

asyncio.run(async_stream_chat())
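Note that asyncio.run starts a fresh event loop, so it raises a RuntimeError in environments where a loop is already running, such as Jupyter notebooks; there you can await the coroutine directly in a cell (or apply nest_asyncio) instead.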
For any questions or feedback, please contact us at [email protected].