diff options
| author | Martin Bielik <mx.bielik@gmail.com> | 2023-03-12 19:47:21 +0100 |
|---|---|---|
| committer | Martin Bielik <mx.bielik@gmail.com> | 2023-03-12 19:47:21 +0100 |
| commit | 0faae7b1fbec281f5a645f25e5012d9deb307f56 (patch) | |
| tree | 94a58dd42133894af446335175a482698f9c7c44 | |
| parent | 59a89739b3d25fc74891b6e086c40b6830702103 (diff) | |
| download | vim-ai-0faae7b1fbec281f5a645f25e5012d9deb307f56.tar.gz | |
getting rid of global dependencies
Diffstat (limited to '')
| -rw-r--r-- | plugin/vim-ai.vim | 16 |
| -rw-r--r-- | py/chat.py | 31 |
| -rw-r--r-- | py/complete.py | 29 |
3 files changed, 48 insertions, 28 deletions
diff --git a/plugin/vim-ai.vim b/plugin/vim-ai.vim index 6c473b1..a5ded7a 100644 --- a/plugin/vim-ai.vim +++ b/plugin/vim-ai.vim @@ -33,9 +33,11 @@ function! AIRun(...) range if g:vim_ai_debug echo "Prompt:\n" . prompt . "\n" endif - echo "Completing..." - let output = system("echo " . shellescape(prompt) . " | python3 " . s:complete_py . " ") + " WORKAROUND: without sleep is echo on prev line not displayed (when combining with py3) + execute 'silent sleep 1m' + execute "py3file " . s:complete_py + let output = py3eval('output') let output = trim(output) execute "normal! " . a:lastline . "G" @@ -59,7 +61,10 @@ function! AIEditRun(...) range endif echo "Editing..." - let output = system("echo " . shellescape(prompt) . " | python3 " . s:complete_py . " ") + " WORKAROUND: without sleep is echo on prev line not displayed (when combining with py3) + execute 'silent sleep 1m' + execute "py3file " . s:complete_py + let output = py3eval('output') let output = trim(output) execute a:firstline . ',' . a:lastline . 'd' @@ -107,7 +112,10 @@ function! AIChatRun(...) range endif echo "Answering..." - let output = system("echo " . shellescape(prompt) . " | python3 " . s:chat_py . " ") + " WORKAROUND: without sleep is echo on prev line not displayed (when combining with py3) + execute 'silent sleep 1m' + execute "py3file " . s:chat_py + let output = py3eval('output') set paste execute "normal! 
ggdG" diff --git a/py/chat.py b/py/chat.py --- a/py/chat.py +++ b/py/chat.py @@ -1,8 +1,8 @@ -import openai - +import requests import sys import os -import openai + +file_content = vim.eval("prompt") config_file_path = os.path.join(os.path.expanduser("~"), ".config/openai.token") @@ -14,12 +14,9 @@ try: except Exception: pass -openai.api_key = api_key.strip() - -lines = sys.stdin.readlines() - -file_content = "".join(lines) +api_key = api_key.strip() +lines = file_content.splitlines() messages = [] for line in lines: @@ -40,11 +37,19 @@ if not messages: file_content = ">>> user\n\n" + file_content messages.append({"role": "user", "content": file_content }) -response = openai.ChatCompletion.create( - model="gpt-3.5-turbo", - messages=messages -) + +url = "https://api.openai.com/v1/chat/completions" +headers = { + 'Content-Type': 'application/json', + 'Authorization': F"Bearer {api_key}" +} +data = { + "model": "gpt-3.5-turbo", + "messages": messages +} +response = requests.post(url, headers=headers, json=data) +response = response.json() answer = response['choices'][0]['message']['content'] -print(f"{file_content.strip()}\n\n<<< assistant\n\n{answer.strip()}\n\n>>> user\n") +output = f"{file_content.strip()}\n\n<<< assistant\n\n{answer.strip()}\n\n>>> user\n" diff --git a/py/complete.py b/py/complete.py index ba460c4..409d47f 100644 --- a/py/complete.py +++ b/py/complete.py @@ -1,6 +1,8 @@ +import requests import sys import os -import openai + +prompt = vim.eval("prompt") config_file_path = os.path.join(os.path.expanduser("~"), ".config/openai.token") @@ -12,15 +14,20 @@ try: except Exception: pass -openai.api_key = api_key.strip() - -prompt = "".join(sys.stdin.readlines()) +api_key = api_key.strip() -completion = openai.Completion.create( - model="text-davinci-003", - prompt=prompt, - max_tokens=1000, - temperature=0.1 -) +url = "https://api.openai.com/v1/completions" +headers = { + 'Content-Type': 'application/json', + 'Authorization': f"Bearer {api_key}" +} +data = { + "model": "text-davinci-003", + "prompt":prompt, + "max_tokens": 
1000, + "temperature": 0.1 +} +response = requests.post(url, headers=headers, json=data) +response = response.json() -print(completion.choices[0].text) +output = response['choices'][0]['text']