from openai import OpenAI
from dotenv import load_dotenv
import numpy as np

# Loads OPENAI_API_KEY from a local .env file so the OpenAI client can authenticate.
load_dotenv()

def chat_with_gpt(messages):
    client = OpenAI()
    try:
        completion = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=messages,
            logprobs=True,    # return the log probability of each generated token
            top_logprobs=2,   # also return the 2 most likely alternatives at each position
        )
        return completion.choices[0].message.content, completion.choices[0].logprobs.content
    except Exception as e:
        print(f"Error: {e}")
        # Return the same (response, logprobs) shape the caller unpacks.
        return f"I've encountered an error: {e}.", []

user_queries = ["In 7 words or less, when will we reach AGI?",
                "In 7 words or less, what's a life well-lived?"]

for query in user_queries:
    print("-" * 50)
    chat_messages = [{"role": "system", "content": "You are a helpful assistant."},
                     {"role": "user", "content": query}]
    response, logprobs = chat_with_gpt(chat_messages)
    print(f'Query: {query} -> Response: {response}')
    for i, logprob in enumerate(logprobs):
        print(f'{str(i).ljust(2)} token: {logprob.token.ljust(20)}\t logprob: {str(logprob.logprob).ljust(15)} '
              f'\tprob: {np.exp(logprob.logprob):.3f} '
              f'top-1: {logprob.top_logprobs[0].token.ljust(15)} logprob: {logprob.top_logprobs[0].logprob:.3f} '
              f'top-2: {logprob.top_logprobs[1].token.ljust(10)} {logprob.top_logprobs[1].logprob}')
    # Perplexity is exp(-mean token logprob) over the whole response, not just the last token.
    perplexity_score = np.exp(-np.mean([lp.logprob for lp in logprobs]))
    print(f'Perplexity score: {perplexity_score:.3f}')