Make Siri speak for your LLM, using macOS `say` and easy-llama
```python
import easy_llama as ez
from subprocess import run

# ANSI style codes that easy-llama uses for coloured terminal output
USER = ez.utils.USER_STYLE
BOT = ez.utils.BOT_STYLE
DIM = ez.utils.DIM_STYLE
RESET = ez.utils.RESET_ALL

def say(something: str) -> None:
    """Speak the given text aloud via the macOS `say` command"""
    run(['say', something])

# load the model, offloading all layers to the GPU
Llama3 = ez.Model(
    'Meta-Llama-3-8B-Instruct-q8_0.gguf',
    n_gpu_layers=-1,
    flash_attn=True,
    verbose=False
)

# start a chat thread using the Llama 3 prompt format
Thread = ez.Thread(
    Llama3,
    ez.formats.llama3,
    ez.samplers.TikTokenSampling
)

try:
    # read a prompt, generate a response, speak it, then print it
    while True:
        prompt = input(f'{RESET}\n > {USER}')
        print(RESET)
        response = Thread.send(prompt)
        say(response)
        print(f"{DIM}{response}{RESET}")
except KeyboardInterrupt:
    pass
finally:
    # on exit, show the thread and its generation statistics
    print(f"{RESET}\n")
    print(repr(Thread))
    print("-" * 32)
    Thread.print_stats()
```
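Note that `run` blocks until `say` finishes speaking, so the dimmed transcript of the response only appears once the audio ends; calling `print` before `say`, or launching `say` with `subprocess.Popen`, would display the text while it is being spoken.

The macOS `say` command also accepts a voice and a speaking rate. Here is a minimal sketch of a drop-in replacement for the `say` helper above; the `Samantha` voice name is only an example and is not guaranteed to be installed. You can list the voices on your machine with `say -v '?'`.

```python
from subprocess import run

def say(something: str, voice: str = 'Samantha', rate: int = 180) -> None:
    """Speak text aloud with a chosen voice and rate (words per minute).

    `-v` (voice) and `-r` (rate) are standard flags of the macOS `say`
    command. The 'Samantha' default is illustrative only; run
    `say -v '?'` in a terminal to list the voices installed locally.
    """
    run(['say', '-v', voice, '-r', str(rate), something])
```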
See here: https://github.com/ddh0/easy-llama