author     Martin Bielik <martin.bielik@instea.sk>  2024-12-05 21:08:45 +0100
committer  Martin Bielik <martin.bielik@instea.sk>  2024-12-05 21:08:45 +0100
commit     007aa33d11e95040f85c0e4a1bbb6c6d213e3cd7 (patch)
tree       501677bf5002f7639cad98060b05f71a7556dd78
parent     aa5eed08273e1a89d9053e9b49b497b463af5e46 (diff)
download   vim-ai-007aa33d11e95040f85c0e4a1bbb6c6d213e3cd7.tar.gz

    fixed stream=0 in chat engine
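The fix targets the non-streaming case: with stream=0 the OpenAI chat completions endpoint returns the whole reply under choices[0]['message'], while streamed chunks carry it under choices[0]['delta']. One code path previously hard-coded the delta shape. A minimal sketch of the two shapes this commit distinguishes (the payload contents below are illustrative, not taken from this repo):

    # Illustrative payloads only -- the field layout follows the OpenAI
    # chat completions API; the content strings are made up.
    streamed_chunk = {'choices': [{'delta':   {'content': 'Hel'}}]}     # stream=1
    full_response  = {'choices': [{'message': {'content': 'Hello.'}}]}  # stream=0

    # Reading a non-streaming response through the streaming shape
    # raises KeyError on 'delta', which is what this commit untangles.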
Diffstat (limited to '')
-rw-r--r--  py/chat.py      | 22
-rw-r--r--  py/complete.py  | 19
-rw-r--r--  py/utils.py     | 24
3 files changed, 29 insertions(+), 36 deletions(-)
diff --git a/py/chat.py b/py/chat.py
index 4850bae..e91ea81 100644
--- a/py/chat.py
+++ b/py/chat.py
@@ -50,8 +50,6 @@ initialize_chat_window()
chat_options = parse_chat_header_options()
options = {**config_options, **chat_options}
-openai_options = make_openai_options(options)
-http_options = make_http_options(options)
initial_prompt = '\n'.join(options.get('initial_prompt', []))
initial_messages = parse_chat_messages(initial_prompt)
@@ -70,25 +68,7 @@ try:
print('Answering...')
vim.command("redraw")
- request = {
- 'messages': messages,
- **openai_options
- }
- printDebug("[chat] request: {}", request)
- url = options['endpoint_url']
- response = openai_request(url, request, http_options)
-
- def map_chunk_no_stream(resp):
- printDebug("[chat] response: {}", resp)
- return resp['choices'][0]['message'].get('content', '')
-
- def map_chunk_stream(resp):
- printDebug("[chat] response: {}", resp)
- return resp['choices'][0]['delta'].get('content', '')
-
- map_chunk = map_chunk_stream if openai_options['stream'] else map_chunk_no_stream
-
- text_chunks = map(map_chunk, response)
+ text_chunks = make_chat_text_chunks(messages, options)
render_text_chunks(text_chunks, is_selection)
vim.command("normal! a\n\n>>> user\n\n")
diff --git a/py/complete.py b/py/complete.py
index 31e00a7..868b7ee 100644
--- a/py/complete.py
+++ b/py/complete.py
@@ -13,12 +13,12 @@ config_options = {
**role_options['options_default'],
**role_options['options_complete'],
}
-openai_options = make_openai_options(config_options)
-http_options = make_http_options(config_options)
-
is_selection = vim.eval("l:is_selection")
def complete_engine(prompt):
+ openai_options = make_openai_options(config_options)
+ http_options = make_http_options(config_options)
+
request = {
'prompt': prompt,
**openai_options
@@ -37,18 +37,7 @@ def chat_engine(prompt):
initial_prompt = '\n'.join(initial_prompt)
chat_content = f"{initial_prompt}\n\n>>> user\n\n{prompt}".strip()
messages = parse_chat_messages(chat_content)
- request = {
- 'messages': messages,
- **openai_options
- }
- printDebug("[engine-chat] request: {}", request)
- url = config_options['endpoint_url']
- response = openai_request(url, request, http_options)
- def map_chunk(resp):
- printDebug("[engine-chat] response: {}", resp)
- return resp['choices'][0]['delta'].get('content', '')
- text_chunks = map(map_chunk, response)
- return text_chunks
+ return make_chat_text_chunks(messages, config_options)
engines = {"chat": chat_engine, "complete": complete_engine}
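py/complete.py is where the stream=0 bug actually lived: chat_engine built its own request and always read choices[0]['delta'], which does not exist in a non-streaming response. The diff also defers make_openai_options/make_http_options into complete_engine, so options are resolved at call time rather than at import. Roughly what the old mapper did on a stream=0 reply (payload illustrative):

    # Old behaviour, sketched: the delta-only mapper applied to a
    # non-streaming response blows up, since 'delta' is absent there.
    resp = {'choices': [{'message': {'content': 'Hi'}}]}  # stream=0 shape
    resp['choices'][0]['delta'].get('content', '')        # KeyError: 'delta'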
diff --git a/py/utils.py b/py/utils.py
index c2a1a63..3382e59 100644
--- a/py/utils.py
+++ b/py/utils.py
@@ -335,3 +335,27 @@ def parse_prompt_and_role(raw_prompt):
delim = '' if prompt.startswith(':') else ':\n'
prompt = config['role']['prompt'] + delim + prompt
return (prompt, config['options'])
+
+def make_chat_text_chunks(messages, config_options):
+ openai_options = make_openai_options(config_options)
+ http_options = make_http_options(config_options)
+
+ request = {
+ 'messages': messages,
+ **openai_options
+ }
+ printDebug("[engine-chat] request: {}", request)
+ url = config_options['endpoint_url']
+ response = openai_request(url, request, http_options)
+
+ def map_chunk_no_stream(resp):
+ printDebug("[engine-chat] response: {}", resp)
+ return resp['choices'][0]['message'].get('content', '')
+
+ def map_chunk_stream(resp):
+ printDebug("[engine-chat] response: {}", resp)
+ return resp['choices'][0]['delta'].get('content', '')
+
+ map_chunk = map_chunk_stream if openai_options['stream'] else map_chunk_no_stream
+
+ return map(map_chunk, response)
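The new make_chat_text_chunks helper derives openai_options itself, so the stream flag it branches on is guaranteed to come from the same options dict that built the request. A minimal usage sketch (the messages list and the stream override are illustrative; config_options as assembled by the callers above):

    # Illustrative call: with stream=0 the helper now selects
    # map_chunk_no_stream and reads choices[0]['message'] instead
    # of choices[0]['delta'].
    messages = [{'role': 'user', 'content': 'Hello'}]
    for chunk in make_chat_text_chunks(messages, {**config_options, 'stream': 0}):
        print(chunk, end='')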