@AwokeKnowing
Forked from kwindla/bot.py
Created March 13, 2025 21:32
Gemini Multimodal Live French tutor
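The script below wires up a voice-to-voice French tutor with Pipecat: a DailyTransport carries audio in and out of a Daily WebRTC room, a Silero VAD segments the user's speech, and GeminiMultimodalLiveLLMService does speech-to-speech generation under a French-teacher system prompt. It reads its credentials from the environment via python-dotenv; a minimal .env sketch (placeholder values only, not real credentials) would look like:

# .env (placeholders — substitute your own Daily room URL, meeting token, and Google API key)
DAILY_ROOM_URL=https://example.daily.co/your-room
DAILY_TOKEN=your-daily-meeting-token
GOOGLE_API_KEY=your-google-api-key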
import asyncio
import os
import sys

import aiohttp
from dotenv import load_dotenv
from loguru import logger

from pipecat.audio.vad.silero import SileroVADAnalyzer
from pipecat.frames.frames import LLMMessagesAppendFrame
from pipecat.pipeline.pipeline import Pipeline
from pipecat.pipeline.runner import PipelineRunner
from pipecat.pipeline.task import PipelineParams, PipelineTask
from pipecat.services.gemini_multimodal_live.gemini import GeminiMultimodalLiveLLMService
from pipecat.transports.services.daily import DailyParams, DailyTransport

load_dotenv(override=True)

logger.remove(0)
logger.add(sys.stderr, level="DEBUG")

# System prompt (in French). English translation: "You are a patient language
# teacher. Your student is an English speaker and wants to learn French. Your
# student will speak in French and in English. Respond in French unless you are
# expressly asked to speak English."
conversation_system_message = """
Vous êtes un professeur de langue patient. Votre élève est anglophone et souhaite apprendre le français. Votre élève s'exprimera en français et en anglais. Répondez en français à moins qu'on vous demande expressément de parler anglais.
"""


async def main():
    print("room", os.getenv("DAILY_ROOM_URL"))
    print("token", os.getenv("DAILY_TOKEN"))

    async with aiohttp.ClientSession() as session:
        # Daily WebRTC transport: audio out to the user, Silero VAD on incoming audio.
        transport = DailyTransport(
            os.getenv("DAILY_ROOM_URL"),
            os.getenv("DAILY_TOKEN"),
            "Respond bot",
            DailyParams(
                audio_out_enabled=True,
                vad_enabled=True,
                vad_analyzer=SileroVADAnalyzer(),
                vad_audio_passthrough=True,
            ),
        )

        # Gemini Multimodal Live is speech-to-speech, so the pipeline needs no
        # separate STT or TTS services.
        llm = GeminiMultimodalLiveLLMService(
            api_key=os.getenv("GOOGLE_API_KEY"),
            system_instruction=conversation_system_message,
        )

        pipeline = Pipeline(
            [
                transport.input(),
                llm,
                transport.output(),
            ]
        )

        task = PipelineTask(
            pipeline,
            params=PipelineParams(
                allow_interruptions=True,
                enable_metrics=True,
                enable_usage_metrics=True,
            ),
        )

        @transport.event_handler("on_first_participant_joined")
        async def on_first_participant_joined(transport, participant):
            # Kick off the conversation. ("Saluez l'utilisateur." = "Greet the user.")
            await task.queue_frames(
                [
                    LLMMessagesAppendFrame(
                        messages=[
                            {
                                "role": "user",
                                "content": "Saluez l'utilisateur.",
                            }
                        ]
                    )
                ]
            )

        runner = PipelineRunner()
        await runner.run(task)


if __name__ == "__main__":
    asyncio.run(main())
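Assuming the file is saved as bot.py (the name it was forked under) and the environment variables above are set, running "python bot.py" starts the pipeline. Once the first participant joins the Daily room from a browser, the event handler queues the "Saluez l'utilisateur." message and the bot greets the user in spoken French.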