Last active: August 29, 2023 21:55
Revisions
ddrscott revised this gist
Aug 29, 2023. 1 changed file with 14 additions and 9 deletions.

```diff
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 """
 Requirements:
     pip install click langchain openai
@@ -16,6 +17,8 @@
     def on_llm_end(self, response, **kwargs) -> None:
         click.echo('\n')

 from langchain.chat_models import ChatOpenAI
+from langchain.schema import HumanMessage, SystemMessage

 def auto_lint(data, model):
     llm=ChatOpenAI(
@@ -26,17 +29,19 @@ def auto_lint(data, model):
         callbacks=[CustomHandler()],
         streaming=True,
     )

+    messages = [
+        SystemMessage(content="""You are a Python script repair bot.
+You are given Python scripts and your job is to perfect them.
+You add Google style docstrings to functions, correct spelling mistakes, and cleanup whitespace.
+You follow PEP8 style guidelines and use Black to format the code.
+You also add TODO comments for smelly code and FIXME comments for known bugs.
+You never explain yourself, just fix the code."""
+        ),
+        HumanMessage(content=data + "\n\n"),
+    ]
+    llm(messages)

 @click.command()
 @click.option('--model', '-m', default='gpt-3.5-turbo-16k-0613')
```
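This revision swaps the single `llm.predict(...)` prompt string for an explicit `SystemMessage`/`HumanMessage` pair. A rough standalone sketch of that pattern follows, assuming the same pre-1.0 langchain API the gist uses and an `OPENAI_API_KEY` in the environment; the prompt and input text here are placeholders, not the gist's:

```python
# Standalone sketch of the SystemMessage/HumanMessage pattern adopted above.
# Assumes langchain's pre-1.0 chat API and OPENAI_API_KEY set in the environment.
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage, SystemMessage

llm = ChatOpenAI(
    model="gpt-3.5-turbo-16k-0613",
    temperature=0.1,
    streaming=True,
    callbacks=[StreamingStdOutCallbackHandler()],  # prints tokens as they arrive
)

messages = [
    SystemMessage(content="You are a Python script repair bot. Reply with code only."),
    HumanMessage(content="def  add( a,b ):return a+b"),
]
llm(messages)  # streams the model's cleaned-up code to stdout
```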
ddrscott created this gist
Aug 29, 2023. New file, 53 lines.

````python
"""
Requirements:
    pip install click langchain openai
"""
import sys

import click
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler


class CustomHandler(StreamingStdOutCallbackHandler):
    def on_llm_start(self, serialized, prompts, **_) -> None:
        pass

    def on_llm_new_token(self, token: str, **_) -> None:
        click.echo(token, nl=False)

    def on_llm_end(self, response, **kwargs) -> None:
        click.echo('\n')


from langchain.chat_models import ChatOpenAI


def auto_lint(data, model):
    llm=ChatOpenAI(
        client=None,
        model=model,
        temperature=0.1,
        verbose=True,
        callbacks=[CustomHandler()],
        streaming=True,
    )

    llm.predict(f"""You are an expert Python developer.
Please make the following updates to the attached code:

- add useful Google style docstrings to functions.
- fix spelling mistakes.
- strip whitespace.

```python
{data}
```

Updated Python code:""")


@click.command()
@click.option('--model', '-m', default='gpt-3.5-turbo-16k-0613')
@click.argument('src', type=click.File('r'), default=sys.stdin)
def my_command(model, src):
    data = None
    with src:
        data = src.read()
    auto_lint(data, model)


if __name__ == '__main__':
    my_command()
````
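The click command accepts a filename argument or reads from stdin, so the script can also be driven from another Python file. A hedged usage sketch, assuming the gist is saved as `auto_lint.py` on the import path (the gist itself doesn't name the file), that `pip install click langchain openai` has been run, and that `OPENAI_API_KEY` is exported:

```python
# Hypothetical usage sketch -- module name auto_lint.py is an assumption;
# "messy_script.py" stands in for any Python file you want cleaned up.
from auto_lint import auto_lint

with open("messy_script.py") as f:
    # Streams the model's cleaned-up version of the file to stdout.
    auto_lint(f.read(), model="gpt-3.5-turbo-16k-0613")
```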
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -0,0 +1,53 @@ """ Requirements: pip install click langchain openai """ import sys import click from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler class CustomHandler(StreamingStdOutCallbackHandler): def on_llm_start(self, serialized, prompts, **_) -> None: pass def on_llm_new_token(self, token: str, **_) -> None: click.echo(token, nl=False) def on_llm_end(self, response, **kwargs) -> None: click.echo('\n') from langchain.chat_models import ChatOpenAI def auto_lint(data, model): llm=ChatOpenAI( client=None, model=model, temperature=0.1, verbose=True, callbacks=[CustomHandler()], streaming=True, ) llm.predict(f"""You are an expert Python developer. Please make the following updates to the attached code: - add useful Google style docstrings to functions. - fix spelling mistakes. - strip whitespace. ```python {data} ``` Updated Python code:""") @click.command() @click.option('--model', '-m', default='gpt-3.5-turbo-16k-0613') @click.argument('src', type=click.File('r'), default=sys.stdin) def my_command(model, src): data = None with src: data = src.read() auto_lint(data, model) if __name__ == '__main__': my_command()