
@BoQsc
Last active September 3, 2024 19:38
groq fact-checker llm: iteratively re-prompts the model with "check facts" appended, then asks for only the correct answers.
from groq import Groq

# Example prompts to try:
#   Give me a list of built-in Python libraries for UI that require no installation and work cross-platform
#   Who were all the characters in the Serial Experiments Lain anime?
#   What was the first version of Python?

# Initialize the Groq client (replace the placeholder with your own API key)
api_key = "gsk_"  # placeholder; keep real keys out of source code
client = Groq(api_key=api_key)
# User input
prompt = input("Enter your statement: ")
initial_prompt = prompt
# Start with the user input
current_prompt = prompt
# Loop for 15 iterations, appending "check facts" on each pass
for i in range(15):
    # Create a completion request with "check facts" appended to the prompt
    completion = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=[{"role": "user", "content": current_prompt + " check facts" + " Provide me only with correct answers to " + initial_prompt}],
        temperature=1,
        max_tokens=7999,
        top_p=1,
        stream=False,
        stop=None,
    )
    # Get the output text from the model
    output = completion.choices[0].message.content
    # Print the output for this iteration
    print(f"Iteration {i+1}: {output}")
    # Feed the output back in as the prompt for the next iteration
    current_prompt = output
# Final request after the loop: ask only for the correct answers to the original prompt
completion = client.chat.completions.create(
    model="llama3-8b-8192",
    messages=[{"role": "user", "content": current_prompt + " Provide me only with correct answers to " + initial_prompt}],
    temperature=0.5,
    max_tokens=1024,
    top_p=1,
    stream=False,
    stop=None,
)
# messages=[{"role": "user", "content": current_prompt + " Provide me only with correct answers to " + initial_prompt + "Provide with explanation for each."}],
# Separator before the final output
print("-----------------------------\n")
# Get the final output from the model
final_output = completion.choices[0].message.content
# Print the final output
print("Final Iteration (Provide correct answers):", final_output)
# Variant: the same loop using llama-3.1-8b-instant, clearing the console between iterations.
import os

from groq import Groq

# Example prompts to try:
#   Give me a list of built-in Python libraries for UI that require no installation and work cross-platform
#   Who were all the characters in the Serial Experiments Lain anime?
#   What was the first version of Python?

# Initialize the Groq client (replace the placeholder with your own API key)
api_key = "gsk_"  # placeholder; keep real keys out of source code
client = Groq(api_key=api_key)
# User input
prompt = input("Enter your statement: ")
initial_prompt = prompt
# Start with the user input
current_prompt = prompt
# Loop for 15 iterations, appending "check facts" on each pass
for i in range(15):
    # Create a completion request with "check facts" appended to the prompt
    completion = client.chat.completions.create(
        model="llama-3.1-8b-instant",
        messages=[{"role": "user", "content": current_prompt + " check facts" + " Provide me only with correct answers to " + initial_prompt}],
        temperature=1,
        max_tokens=7999,
        top_p=1,
        stream=False,
        stop=None,
    )
    # Get the output text from the model
    output = completion.choices[0].message.content
    # Clear the console before printing (note: 'cls' is Windows-only)
    os.system('cls')
    # Print the output for this iteration
    print(f"Iteration {i+1}: {output}")
    # Feed the output back in as the prompt for the next iteration
    current_prompt = output
# Final request after the loop: ask why and for only the correct (or selected) answer to the original prompt
completion = client.chat.completions.create(
    model="llama3-8b-8192",
    messages=[{"role": "user", "content": current_prompt + " Tell me why and Provide me only with correct answers or select answer to " + initial_prompt}],
    temperature=0.5,
    max_tokens=1024,
    top_p=1,
    stream=False,
    stop=None,
)
# messages=[{"role": "user", "content": current_prompt + " Provide me only with correct answers to " + initial_prompt + "Provide with explanation for each."}],
# Separator before the final output
print("-----------------------------\n")
# Get the final output from the model
final_output = completion.choices[0].message.content
# Print the final output
print("Final Iteration (Provide correct answers):", final_output)