path: root/py/complete.py
import vim

# load shared helpers (openai_request, render_text_chunks, printDebug,
# make_request_options, parse_chat_messages) from utils.py into this namespace
plugin_root = vim.eval("s:plugin_root")
vim.command(f"py3file {plugin_root}/py/utils.py")

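# engine name, user options and the prompt are passed in from the calling Vim script;
# make_request_options (utils.py) maps the configured options onto the OpenAI request parameters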
engine = vim.eval("engine")
config_options = vim.eval("options")
request_options = make_request_options(config_options)

prompt = vim.eval("prompt").strip()

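# "complete" engine: streams from the legacy /v1/completions endpoint and yields
# the text of each streamed chunk (choices[0].text)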
def complete_engine(prompt):
    request = {
        'stream': True,
        'prompt': prompt,
        **request_options
    }
    printDebug("[engine-complete] request: {}", request)
    response = openai_request('https://api.openai.com/v1/completions', request)
    def map_chunk(resp):
        printDebug("[engine-complete] response: {}", resp)
        return resp['choices'][0].get('text', '')
    text_chunks = map(map_chunk, response)
    return text_chunks

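# "chat" engine: prepends the configured initial_prompt, wraps the user prompt in a
# ">>> user" section and streams from /v1/chat/completions, yielding each delta's content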
def chat_engine(prompt):
    initial_prompt = config_options.get('initial_prompt', [])
    initial_prompt = '\n'.join(initial_prompt)
    chat_content = f"{initial_prompt}\n\n>>> user\n\n{prompt}".strip()
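    # parse_chat_messages (utils.py) splits the role markers (e.g. ">>> user") into
    # OpenAI-style message dicts with "role" and "content" keys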
    messages = parse_chat_messages(chat_content)
    request = {
        'stream': True,
        'messages': messages,
        **request_options
    }
    printDebug("[engine-chat] request: {}", request)
    response = openai_request('https://api.openai.com/v1/chat/completions', request)
    def map_chunk(resp):
        printDebug("[engine-chat] response: {}", resp)
        return resp['choices'][0]['delta'].get('content', '')
    text_chunks = map(map_chunk, response)
    return text_chunks

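# dispatch table: the engine name coming from Vim selects one of the handlers above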
engines = {"chat": chat_engine, "complete": complete_engine}

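# run the selected engine and stream its output into the buffer;
# Ctrl-C interrupts the stream and leaves a " Ctrl-C..." marker where insertion stopped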
try:
    if prompt:
        print('Completing...')
        vim.command("redraw")
        text_chunks = engines[engine](prompt)
        render_text_chunks(text_chunks)
except KeyboardInterrupt:
    vim.command("normal! a Ctrl-C...")