Skip to content

Instantly share code, notes, and snippets.

@ahafidi
Last active February 27, 2025 12:20
Show Gist options
  • Save ahafidi/3397fdfda6c8fe31e24c28762ac0a410 to your computer and use it in GitHub Desktop.
from mistralai import Mistral
# NOTE(review): MISTRAL_API_KEY is not defined in this snippet — presumably
# loaded earlier or from the environment; confirm before running.
client = Mistral(api_key=MISTRAL_API_KEY)
# System prompt sent with every request (placeholder in this gist).
model_instructions = "..."
# for chunk in ...
def with_synchronous_streaming(user_prompt: str) -> None:
    """Stream a chat completion synchronously, printing each content delta.

    Args:
        user_prompt: The user's message, sent alongside the system prompt
            held in the module-level ``model_instructions``.
    """
    messages = [
        {
            "role": "system",
            "content": model_instructions,
        },
        {
            "role": "user",
            "content": user_prompt,
        },
    ]
    # client.chat.stream(...) returns a synchronous generator of events.
    for chunk in client.chat.stream(model=LLM_MODEL, messages=messages):
        content = chunk.data.choices[0].delta.content
        # Deltas can be None (e.g. role-only or final chunks); skip them,
        # matching the guard already present in the async variant.
        if content is not None:
            print(content)
# async for chunk in ...
async def with_asynchronous_streaming(user_prompt: str) -> None:
    """Stream a chat completion asynchronously, printing each content delta.

    Args:
        user_prompt: The user's message, sent alongside the system prompt
            held in the module-level ``model_instructions``.
    """
    messages = [
        {
            "role": "system",
            "content": model_instructions,
        },
        {
            "role": "user",
            "content": user_prompt,
        },
    ]
    # stream_async(...) must be awaited first; the result is an
    # asynchronous generator of events.
    async for chunk in await client.chat.stream_async(model=LLM_MODEL, messages=messages):
        content = chunk.data.choices[0].delta.content
        # Deltas can be None (e.g. role-only or final chunks); skip them.
        if content is not None:
            print(content)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment