Created
February 7, 2025 23:44
-
-
Save hradec/5083d91018de12269f6b3760e9bfa15e to your computer and use it in GitHub Desktop.
aipython.py - a Python wrapper that runs scripts; if a script fails to run, it automatically asks Ollama to fix it, tests it again, and so on...
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
import subprocess | |
import sys | |
import os | |
import tempfile | |
import time | |
import threading | |
from ollama import Client | |
# Resolve the Ollama API host: use the OLLAMA_API environment variable when
# set, otherwise fall back to the default local-network address.
# Bug fix: the original only wrote the default into os.environ when the
# variable was missing, leaving the local OLLAMA_API name as None — so the
# client below was constructed with host=None.
OLLAMA_API = os.environ.get("OLLAMA_API") or 'http://192.168.10.207:11434'
# Keep the environment populated as the original did, for any child process
# or library code that reads OLLAMA_API itself.
os.environ.setdefault("OLLAMA_API", OLLAMA_API)
# Create an Ollama client instance using the resolved host.
client = Client(host=OLLAMA_API)
def stream_output(pipe, is_error=False, collector=None):
    """Echo lines from a subprocess pipe to this process's output in real time.

    Each line read from *pipe* is printed to stderr when *is_error* is true,
    otherwise to stdout.  When *collector* is a list, every echoed line is
    also appended to it so the caller can inspect the output afterwards.
    The pipe is closed once it is exhausted.
    """
    while True:
        chunk = pipe.readline()
        if chunk == '':
            # readline() returns '' only at EOF in text mode.
            break
        destination = sys.stderr if is_error else sys.stdout
        print(chunk, end='', file=destination)
        if collector is not None:
            collector.append(chunk)
    pipe.close()
def run_script(args):
    """Execute ``python <args...>``, streaming stdout/stderr live.

    Both streams are forwarded to this process's own stdout/stderr as they
    arrive (via one reader thread per stream) and stderr is additionally
    captured.  Returns a tuple of (returncode, collected stderr output).
    """
    proc = subprocess.Popen(
        [sys.executable, *args],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    captured_out = []
    captured_err = []
    readers = [
        threading.Thread(target=stream_output, args=(proc.stdout, False, captured_out)),
        threading.Thread(target=stream_output, args=(proc.stderr, True, captured_err)),
    ]
    for reader in readers:
        reader.start()
    proc.wait()
    for reader in readers:
        reader.join()
    return proc.returncode, ''.join(captured_err)
def extract_python_code(response):
    """Return the body of the first ```python fenced block in *response*.

    Capturing starts after a line whose stripped content is exactly
    "```python" and stops at the next line that is exactly "```".  The
    captured lines are joined with newlines; an empty string is returned
    when no opening fence is present.
    """
    captured = []
    capturing = False
    for raw in response.splitlines():
        marker = raw.strip()
        if not capturing:
            if marker == "```python":
                capturing = True
            continue
        if marker == "```":
            break
        captured.append(raw)
    return "\n".join(captured)
def extract_diff_code(response):
    """Return the body of the first ```diff fenced block in *response*.

    The diff is expected between a line whose stripped content is exactly
    "```diff" and the next line that is exactly "```".  Returns an empty
    string when no opening fence is found.

    (Bug fix: the previous docstring was copy-pasted from
    extract_python_code and wrongly described a ```python fence.)
    """
    lines = response.splitlines()
    in_code = False
    code_lines = []
    for line in lines:
        if not in_code and line.strip() == "```diff":
            in_code = True
            continue
        elif in_code and line.strip() == "```":
            break
        if in_code:
            code_lines.append(line)
    return "\n".join(code_lines)
def call_ollama(script_content, error_output):
    """Ask the Ollama model to repair a failing script.

    Sends the script source plus the captured stderr to the model, streams
    the model's reply to stdout as it arrives, and returns the Python code
    found inside the first ```python fenced block of the full response
    (an empty string when the model produced no such block).
    """
    prompt = (
        "Script:\n"
        f"{script_content}\n\n"
        "Error:\n"
        f"{error_output}\n\n"
    )
    # Bug fix: print the error text directly instead of
    # prompt.split("Error:")[1], which broke (printed the wrong slice)
    # whenever the script source itself contained the string "Error:".
    print(f"Error: {error_output}")
    # Bug fix: the original system prompt contradicted itself — it demanded
    # the full source, then a git diff, then "no markdown" — while
    # extract_python_code() below requires a markdown ```python fence.
    # The prompt now matches what the extractor parses.
    stream = client.chat(
        model='deepseek-r1:8b',
        messages=[
            {'role': 'system', 'content': (
                'You are a Python code fixer. Given the following script and '
                'error message, provide a corrected version of the full Python '
                'source code. Do not give any explanation or context, and no '
                '<think> output either. Reply with the complete fixed source '
                'code enclosed in a markdown code block starting with a line '
                'containing only ```python and ending with a line containing '
                'only ```. Never reply with snippets, incomplete code, or '
                'diffs.\n\n')},
            {'role': 'user', 'content': prompt}
        ],
        stream=True,
        options={'num_ctx': 16384}
    )
    full_response = ""
    for chunk in stream:
        piece = chunk['message']['content']
        # Echo each streamed fragment immediately so the user sees progress.
        print(piece, end='', flush=True)
        full_response += piece
    # Extract the Python code from the accumulated response.
    return extract_python_code(full_response)
def main():
    """Run the target script; on failure, loop asking Ollama for a fix.

    Usage: ``python auto_fix.py <script_file.py> [script args...]``

    Each iteration runs the current candidate script.  On a clean exit
    (returncode 0 and empty stderr) the loop ends; if the candidate was a
    generated temp file it is renamed to ``<original>_fixed.py``.  On
    failure, the script source and stderr are sent to Ollama, the proposed
    fix is written to a fresh temp file, and the loop retries with it.
    """
    if len(sys.argv) < 2:
        print("Usage: python auto_fix.py <script_file.py>")
        sys.exit(1)
    original_script = sys.argv[1]
    current_script = original_script
    is_temporary = False  # True once we are executing a generated temp file
    while True:
        print(f"\n\nRunning script: {current_script}")
        returncode, stderr = run_script([current_script] + sys.argv[2:])
        # Success requires both a zero exit code and an empty stderr.
        if returncode == 0 and stderr.strip() == "":
            print("\nScript executed successfully.")
            # Keep the working fix under a recognizable name next to the original.
            if is_temporary and current_script != original_script:
                base, _ = os.path.splitext(original_script)
                fixed_script_name = base + "_fixed.py"
                os.rename(current_script, fixed_script_name)
                print(f"Temporary file renamed to {fixed_script_name}")
            break
        print("\nError detected. Attempting to fix the script via Ollama...\n")
        # Read the content of the current candidate script.
        with open(current_script, "r") as f:
            script_content = f.read()
        try:
            fixed_script = call_ollama(script_content, stderr)
        except Exception as e:
            print("Failed to fix the script:", e)
            sys.exit(1)
        if not fixed_script.strip():
            print("No fixed code was extracted from Ollama's response. Exiting.")
            sys.exit(1)
        # Write the fixed script to a temporary file for the next iteration.
        with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as tmp:
            tmp.write(fixed_script)
            temp_script_path = tmp.name
        print(f"\nFixed script written to temporary file: {temp_script_path}")
        # Bug fix: remove the superseded temp attempt so failed fixes do not
        # accumulate in the temp directory across iterations (the original
        # leaked one file per failed attempt).
        if is_temporary:
            try:
                os.remove(current_script)
            except OSError:
                pass  # best effort; a leftover temp file is not fatal
        current_script = temp_script_path
        is_temporary = True
        # Short delay before retrying to avoid hammering the model host.
        time.sleep(1)


if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment