Skip to content

Instantly share code, notes, and snippets.

@sweemeng
Last active November 1, 2025 15:45
Show Gist options
  • Select an option

  • Save sweemeng/1d62a95b4ba54c6540b1004c28aaa174 to your computer and use it in GitHub Desktop.

Select an option

Save sweemeng/1d62a95b4ba54c6540b1004c28aaa174 to your computer and use it in GitHub Desktop.
Example code for PyCon MY 2025

Learn OOP or else

Aka how I stopped worrying and learned to love the fundamentals of software design

This is an example of code that doesn't run as-is, but is roughly correct (I got home at 11, and I needed to wake up early to arrive here on time).

class BaseBot:
    """Abstract interface for chat bots; subclasses must implement send()."""

    def send(self, question):
        """Send *question* to the bot backend and return the reply text."""
        # Bug fix: `raise NotImplemented` raises a TypeError, because
        # NotImplemented is a constant, not an exception class.
        # NotImplementedError is the conventional abstract-method marker.
        raise NotImplementedError
from llama_cpp import Llama
class LlamaBot(BaseBot):
    """Bot backed by a local llama.cpp model loaded from disk."""

    def __init__(self):
        self.llm = Llama(
            model_path="./models/7B/llama-model.gguf",
            # n_gpu_layers=-1,  # Uncomment to use GPU acceleration
            # seed=1337,        # Uncomment to set a specific seed
            # n_ctx=2048,       # Uncomment to increase the context window
        )

    def send(self, question):
        """Run *question* through the model and return the completion text."""
        # Bug fixes: the prompt previously hard-coded a planets question and
        # ignored `question`; also the result key is "choices", not "choice".
        output = self.llm(
            f"Q: {question} A: ",
            max_tokens=32,
            stop=["Q:", "\n"],
            echo=True,
        )
        return output["choices"][0]["text"]
from basebot import BaseBot
class MockBaseBot(BaseBot):
    """Stub bot for tests: always returns a canned reply."""

    def send(self, question):
        # The incoming question is deliberately ignored by the mock.
        return "test data"
from base_bot import BaseBot
import os
import yaml
from openai import OpenAI
class OpenAIBot(BaseBot):
def __init__(self):
api_key = os.getenv("OPENAI_API_KEY")
self.client = OpenAI(
# This is the default and can be omitted
api_key=api_key,
)
self.instruction = "You are a coding assistant that talks like a pirate."
self.model="gpt-4o"
def send(self, question):
response = self.client.responses.create(
model=self.model,
instructions=self.instruction,
input=question,
)
return response.output_text
class BaseConfigurableOpenAIBot(OpenAIBot):
    """OpenAI bot whose model/instruction come from an external config source."""

    def __init__(self, source=None):
        super().__init__()
        # Bug fix: load_config() was called with no argument even though
        # load_config requires one (TypeError). Accept the config source in
        # __init__ (defaulting to None for backward compatibility) and pass
        # it through.
        self.load_config(source)

    def load_config(self, source):
        """Populate configuration from *source*. Subclasses must override."""
        # Bug fix: NotImplemented is a value, not an exception class.
        raise NotImplementedError
class YamlConfigurableOpenAIBot(BaseConfigurableOpenAIBot):
    """Configurable bot that reads its settings from a YAML document."""

    def load_config(self, source):
        """Parse *source* as YAML and apply any recognized settings."""
        # Security fix: yaml.load without an explicit Loader can construct
        # arbitrary Python objects from untrusted input; safe_load only
        # builds plain data types.
        data = yaml.safe_load(source)
        # set variable here — apply known keys when present, keeping the
        # defaults from OpenAIBot.__init__ otherwise.
        if data:
            self.model = data.get("model", self.model)
            self.instruction = data.get("instruction", self.instruction)
class DBConfigurableOpenAIBot(BaseConfigurableOpenAIBot):
    """Configurable bot that loads its settings from a database row."""

    # Typo fix: the base class was spelled BaseCOnfigurableOpenAIBot,
    # which raises NameError at class-definition time.

    def load_config(self, bot_id):
        """Load model/instruction for *bot_id* from the bot_config table."""
        # do your database thing here
        # select model, instruction from bot_config where id=bot_id
        # Bug fix: a body consisting only of comments is a SyntaxError;
        # raise until the database lookup is implemented.
        raise NotImplementedError
def test_request():
    """The /mockbot/ route should return whatever MockBaseBot.send yields."""
    client = test_client("/mockbot/")
    # Bug fixes: the class is MockBaseBot (not MockBot), and send() is an
    # instance method that takes the question as an argument.
    assert client.output == MockBaseBot().send("question")
# Assume bot registry is populated magically in a vaguely flask like framework
from bot_factory import bot_registry
import route
import request
@route("/<model>/")
def bot_one(model):
    """Answer a question using the bot class registered under *model*."""
    # Bug fix: the imported module is `request`, not `requests`.
    question = request.get("question")
    bot = bot_registry[model]()
    result = bot.send(question)
    return result
@route("/<model>/<bot_id>")
def bot_two(model, bot_id):
    """Answer a question with a bot configured from *bot_id*."""
    # Bug fixes: use the `request` module (not `requests`); index the
    # registry with the `model` variable (not the literal string "model");
    # instantiate the registered class; pass the real bot_id to
    # load_config (not the literal string "source").
    question = request.get("question")
    bot = bot_registry[model]()
    bot.load_config(bot_id)
    # assume you handle method don't exist etc
    return bot.send(question)
@route("/bots/<bot_id>")
def bot_three(bot_id):
    """Answer a question with a bot whose config lives in the database."""
    # assume we do a function that retrieve data from db
    # select engine, model, instruction from bot_config where id=bot_id
    engine, model, instruction = get_config(bot_id)
    # Bug fixes: `question` was never read from the request; the registry
    # entry was never instantiated; `engine` (the bot implementation) is
    # the registry key here — `model` is just a configuration value.
    question = request.get("question")
    bot = bot_registry[engine]()
    bot.load_config_from_data([model, instruction])
    return bot.send(question)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment