Skip to content

Instantly share code, notes, and snippets.

from openai import OpenAI
from dotenv import load_dotenv
import numpy as np
load_dotenv()
def chat_with_gpt(messages):
client = OpenAI()
try:
completion = client.chat.completions.create(
model="gpt-3.5-turbo",
"""
MistralForCausalLM(
(model): MistralModel(
(embed_tokens): Embedding(131072, 5120)
(layers): ModuleList(
(0-39): 40 x MistralDecoderLayer(
(self_attn): MistralAttention(
(q_proj): Linear(in_features=5120, out_features=4096, bias=False)
(k_proj): Linear(in_features=5120, out_features=1024, bias=False)
(v_proj): Linear(in_features=5120, out_features=1024, bias=False)
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@aurotripathy
aurotripathy / furiosa-rngd-tool-calling-example.ipynb
Last active December 16, 2024 20:53
Furiosa RNGD Tool Calling Example
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@aurotripathy
aurotripathy / llama3.1-tool-calling-with-local-furiosa-llm.py
Last active December 11, 2024 12:32
llama3.1-tool-calling-with-local-llm.py
# attribution : https://github.com/AgiFlow/llama31/blob/main/tool_calls.ipynb
from furiosa_llm import LLM, SamplingParams
prompt = """
<|begin_of_text|>
<|start_header_id|>system<|end_header_id|>
You are a helpful assistant with tool calling capabilities. When you receive a tool call response, use the output to format an answer to the original user question.
If you are using tools, respond in the format {"name": function name, "parameters": dictionary of function arguments}. Do not use variables.
Enter a number: 10
[0, 1, 1, 2, 3, 5, 8, 13, 21, 34]
def fibonacci(n: int):
"""Return a fibonacci series upto the argument n"""
# As prefix, we provided the function proto, a docstring...
# <codestral will fill in the middle, i.e., the algorithm>
# ...and the invocation (the suffix)
@aurotripathy
aurotripathy / filled-code.py
Created June 3, 2024 06:08
The "middle" code generated by codestral
def fibonacci(n: int):
"""Return a fibonacci series upto the argument n"""
if n == 0:
return []
elif n == 1:
return [0]
elif n == 2:
return [0, 1]
else:
@aurotripathy
aurotripathy / fill-in-the-middle.py
Created June 3, 2024 06:06
Fill-in-the-middle with the Codestral model
import os
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage
import os
from mistralai.client import MistralClient
"""
Uses a combination of 'furiosactl info' and 'furiosa top' to collect temperature + power + NPU utilization over the entire run
"""
import subprocess
import time
import dateutil
from datetime import datetime
from dateutil.parser import parse
import csv