diff options
Diffstat (limited to 'py')
-rw-r--r--  py/chat.py     | 11 +++++++++--
-rw-r--r--  py/complete.py |  2 --
-rw-r--r--  py/utils.py    | 14 ++++++++++----
3 files changed, 19 insertions, 8 deletions
@@ -71,16 +71,23 @@ try:
     vim.command("redraw")

     request = {
-        'stream': True,
         'messages': messages,
         **openai_options
    }
    printDebug("[chat] request: {}", request)
    url = options['endpoint_url']
    response = openai_request(url, request, http_options)
-    def map_chunk(resp):
+
+    def map_chunk_no_stream(resp):
+        printDebug("[chat] response: {}", resp)
+        return resp['choices'][0]['message'].get('content', '')
+
+    def map_chunk_stream(resp):
         printDebug("[chat] response: {}", resp)
         return resp['choices'][0]['delta'].get('content', '')
+
+    map_chunk = map_chunk_stream if openai_options['stream'] else map_chunk_no_stream
+
    text_chunks = map(map_chunk, response)
    render_text_chunks(text_chunks, is_selection)
diff --git a/py/complete.py b/py/complete.py
index f340e96..31e00a7 100644
--- a/py/complete.py
+++ b/py/complete.py
@@ -20,7 +20,6 @@ is_selection = vim.eval("l:is_selection")

 def complete_engine(prompt):
     request = {
-        'stream': True,
         'prompt': prompt,
         **openai_options
     }
@@ -39,7 +38,6 @@ def chat_engine(prompt):
     chat_content = f"{initial_prompt}\n\n>>> user\n\n{prompt}".strip()
     messages = parse_chat_messages(chat_content)
     request = {
-        'stream': True,
         'messages': messages,
         **openai_options
     }
diff --git a/py/utils.py b/py/utils.py
index 9778f1a..c54f2e5 100644
--- a/py/utils.py
+++ b/py/utils.py
@@ -49,13 +49,13 @@ def normalize_config(config):
         normalized['options']['initial_prompt'] = normalized['options']['initial_prompt'].split('\n')
     return normalized

-
 def make_openai_options(options):
     max_tokens = int(options['max_tokens'])
     return {
         'model': options['model'],
         'max_tokens': max_tokens if max_tokens > 0 else None,
         'temperature': float(options['temperature']),
+        'stream': int(options['stream']) == 1,
     }

 def make_http_options(options):
@@ -91,8 +91,10 @@ def render_text_chunks(chunks, is_selection):
     full_text = ''
     insert_before_cursor = need_insert_before_cursor(is_selection)
     for text in chunks:
-        if not text.strip() and not generating_text:
-            continue # trim newlines from the beginning
+        if not generating_text:
+            text = text.lstrip() # trim newlines from the beginning
+            if not text:
+                continue
         generating_text = True
         if insert_before_cursor:
             vim.command("normal! i" + text)
@@ -103,7 +105,7 @@
         vim.command("redraw")
         full_text += text
     if not full_text.strip():
-        print_info_message('Empty response received. Tip: You can try modifying the prompt and retry.')
+        raise KnownError('Empty response received. Tip: You can try modifying the prompt and retry.')


 def parse_chat_messages(chat_content):
@@ -218,7 +220,11 @@
         headers=headers,
         method="POST",
     )
+
     with urllib.request.urlopen(req, timeout=request_timeout) as response:
+        if not data['stream']:
+            yield json.loads(response.read().decode())
+            return
         for line_bytes in response:
             line = line_bytes.decode("utf-8", errors="replace")
             if line.startswith(OPENAI_RESP_DATA_PREFIX):