diff options
| author | Martin Bielik <martin.bielik@instea.sk> | 2024-12-05 21:08:45 +0100 |
|---|---|---|
| committer | Martin Bielik <martin.bielik@instea.sk> | 2024-12-05 21:08:45 +0100 |
| commit | 007aa33d11e95040f85c0e4a1bbb6c6d213e3cd7 (patch) | |
| tree | 501677bf5002f7639cad98060b05f71a7556dd78 /py/utils.py | |
| parent | aa5eed08273e1a89d9053e9b49b497b463af5e46 (diff) | |
| download | vim-ai-007aa33d11e95040f85c0e4a1bbb6c6d213e3cd7.tar.gz | |
fixed stream=0 in chat engine
Diffstat (limited to 'py/utils.py')
| -rw-r--r-- | py/utils.py | 24 |
1 files changed, 24 insertions, 0 deletions
def make_chat_text_chunks(messages, config_options):
    """Send *messages* to the chat-completion endpoint and return an
    iterator of text fragments extracted from each response chunk.

    Handles both response modes selected by the 'stream' option:
    streaming responses carry incremental text under 'delta', while
    non-streaming responses carry the full text under 'message'.
    """
    openai_options = make_openai_options(config_options)
    http_options = make_http_options(config_options)

    request = {'messages': messages, **openai_options}
    printDebug("[engine-chat] request: {}", request)
    response = openai_request(config_options['endpoint_url'], request, http_options)

    def extract_full(chunk):
        # non-streaming: whole completion arrives in one 'message' object
        printDebug("[engine-chat] response: {}", chunk)
        return chunk['choices'][0]['message'].get('content', '')

    def extract_stream(chunk):
        # streaming: each chunk holds an incremental 'delta' fragment
        printDebug("[engine-chat] response: {}", chunk)
        return chunk['choices'][0]['delta'].get('content', '')

    extractor = extract_stream if openai_options['stream'] else extract_full
    # lazy generator, equivalent to map(extractor, response)
    return (extractor(chunk) for chunk in response)