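# Thin command-line wrapper around aider-chat that talks to an OpenAI-compatible
# server configured via H2OGPT_OPENAI_BASE_URL / H2OGPT_OPENAI_API_KEY.
#
# Example invocation (illustrative only; the script name, base URL, model name,
# and file paths below are placeholders):
#
#   export H2OGPT_OPENAI_BASE_URL=http://localhost:5000/v1
#   python aider_tool.py --files app.py --prompt "Add type hints" --model gpt-4o
#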
import argparse
import os
import subprocess
import sys

# Check whether aider-chat is already installed by looking up its package metadata.
try:
    from importlib.metadata import distribution, PackageNotFoundError
    assert distribution('aider-chat') is not None
    have_aider = True
except (PackageNotFoundError, AssertionError):
    have_aider = False


def install_aider():
    if not have_aider:
        subprocess.check_call([sys.executable, "-m", "pip", "install", "aider-chat>=0.59.0"])
        print("Successfully installed aider-chat.")


def main():
    # Install aider-chat if not already installed
    try:
        import aider
    except ImportError:
        print("aider-chat not found. Installing...")
        install_aider()
        # Invalidate import caches so the freshly installed package can be found
        # by the imports below without restarting the interpreter.
        import importlib
        importlib.invalidate_caches()

    # Now we can safely import from aider
    from aider.coders import Coder
    from aider.models import Model
    from aider.io import InputOutput

    default_max_time = int(os.getenv('H2OGPT_AGENT_OPENAI_TIMEOUT', "120"))

    parser = argparse.ArgumentParser(description="Aider Coding Tool")
    parser.add_argument("--model", type=str, help="Model to use for coding assistance")
    parser.add_argument("--files", nargs="+", required=False, help="Files to work on")
    parser.add_argument("--output_dir", type=str, default="aider_output", help="Directory for output files")
    parser.add_argument("--prompt", "--query", type=str, required=True, help="Prompt or query for the coding task")
    parser.add_argument("--max_time", type=int, default=default_max_time, help="Maximum time in seconds for API calls")
    parser.add_argument("--verbose", action="store_true", help="Show verbose output")
    args = parser.parse_args()

    # Ensure output directory exists
    os.makedirs(args.output_dir, exist_ok=True)

    # Set up OpenAI-like client
    base_url = os.getenv('H2OGPT_OPENAI_BASE_URL')
    assert base_url is not None, "H2OGPT_OPENAI_BASE_URL environment variable is not set"
    server_api_key = os.getenv('H2OGPT_OPENAI_API_KEY', 'EMPTY')
    from openai import OpenAI
    client = OpenAI(base_url=base_url, api_key=server_api_key, timeout=args.max_time)

    # Set environment variables for Aider
    os.environ['OPENAI_API_KEY'] = server_api_key
    os.environ['OPENAI_API_BASE'] = base_url

    # Set up InputOutput: auto-confirm prompts and log the chat history to the output dir
    io = InputOutput(
        yes=True,
        chat_history_file=os.path.join(args.output_dir, "chat_history.txt"),
        pretty=True,
    )

    # Determine which model to use
    if args.model:
        selected_model = args.model
    elif os.getenv('H2OGPT_AGENT_OPENAI_MODEL'):
        selected_model = os.getenv('H2OGPT_AGENT_OPENAI_MODEL')
    else:
        # Only fetch the model list if we need to use the default
        model_list = client.models.list()
        selected_model = model_list.data[0].id

    print(f"Using model: {selected_model}")

    # Set up Model
    main_model = Model(selected_model)

    # Set up Coder with streaming enabled
    coder = Coder.create(
        main_model=main_model,
        fnames=args.files if args.files else [],
        io=io,
        stream=True,
        use_git=False,
        edit_format="diff"
        #edit_format="whole"  # required for weaker models
    )

    # Run the prompt
    output = coder.run(args.prompt)

    # Save the output
    output_file = os.path.join(args.output_dir, "aider_output.txt")
    with open(output_file, "w") as f:
        f.write(output)

    if args.verbose:
        print(f"Task completed. Output saved to {output_file}")


if __name__ == "__main__":
    main()