Diffstat
 -rw-r--r--  py/chat.py      | 47
 -rw-r--r--  py/complete.py  | 39
 -rw-r--r--  py/utils.py     | 12
3 files changed, 44 insertions, 54 deletions
diff --git a/py/chat.py b/py/chat.py
--- a/py/chat.py
+++ b/py/chat.py
@@ -1,20 +1,12 @@
-import requests
-import sys
-import os
+import openai
 
-file_content = vim.eval("prompt")
+# import utils
+plugin_root = vim.eval("s:plugin_root")
+vim.command(f"py3file {plugin_root}/py/utils.py")
 
-config_file_path = os.path.join(os.path.expanduser("~"), ".config/openai.token")
+openai.api_key = load_api_key()
 
-api_key = os.getenv("OPENAI_API_KEY")
-
-try:
-    with open(config_file_path, 'r') as file:
-        api_key = file.read()
-except Exception:
-    pass
-
-api_key = api_key.strip()
+file_content = vim.eval('trim(join(getline(1, "$"), "\n"))')
 
 lines = file_content.splitlines()
 messages = []
@@ -37,19 +29,20 @@ if not messages:
     file_content = ">>> user\n\n" + file_content
 
 messages.append({"role": "user", "content": file_content })
+vim.command("normal! Go\n<<< assistant\n\n")
+vim.command("redraw")
 
-url = "https://api.openai.com/v1/chat/completions"
-headers = {
-    'Content-Type': 'application/json',
-    'Authorization': F"Bearer {api_key}"
-}
-data = {
-    "model": "gpt-3.5-turbo",
-    "messages": messages
-}
-response = requests.post(url, headers=headers, json=data)
-response = response.json()
+response = openai.ChatCompletion.create(
+    model="gpt-3.5-turbo",
+    messages=messages,
+    stream=True,
+)
 
-answer = response['choices'][0]['message']['content']
+for resp in response:
+    if 'content' in resp['choices'][0]['delta']:
+        text = resp['choices'][0]['delta']['content']
+        vim.command("normal! a" + text)
+        vim.command("redraw")
 
-output = f"{file_content.strip()}\n\n<<< assistant\n\n{answer.strip()}\n\n>>> user\n"
+vim.command("normal! a\n\n>>> user\n")
+vim.command("redraw")
diff --git a/py/complete.py b/py/complete.py
index 409d47f..c37cbd0 100644
--- a/py/complete.py
+++ b/py/complete.py
@@ -1,33 +1,18 @@
-import requests
-import sys
-import os
+import openai
 
-prompt = vim.eval("prompt")
-
-config_file_path = os.path.join(os.path.expanduser("~"), ".config/openai.token")
+# import utils
+plugin_root = vim.eval("s:plugin_root")
+vim.command(f"py3file {plugin_root}/py/utils.py")
 
-api_key = os.getenv("OPENAI_API_KEY")
-
-try:
-    with open(config_file_path, 'r') as file:
-        api_key = file.read()
-except Exception:
-    pass
+prompt = vim.eval("prompt")
 
-api_key = api_key.strip()
+openai.api_key = load_api_key()
 
-url = "https://api.openai.com/v1/completions"
-headers = {
-    'Content-Type': 'application/json',
-    'Authorization': f"Bearer {api_key}"
-}
-data = {
-    "model": "text-davinci-003",
-    "prompt":prompt,
-    "max_tokens": 1000,
-    "temperature": 0.1
-}
-response = requests.post(url, headers=headers, json=data)
-response = response.json()
+response = openai.Completion.create(
+    model="text-davinci-003",
+    prompt=prompt,
+    max_tokens=1000,
+    temperature=0.1
+)
 
 output = response['choices'][0]['text']
diff --git a/py/utils.py b/py/utils.py
new file mode 100644
index 0000000..c9e88f2
--- /dev/null
+++ b/py/utils.py
@@ -0,0 +1,12 @@
+import sys
+import os
+
+def load_api_key():
+    config_file_path = os.path.join(os.path.expanduser("~"), ".config/openai.token")
+    api_key = os.getenv("OPENAI_API_KEY")
+    try:
+        with open(config_file_path, 'r') as file:
+            api_key = file.read()
+    except Exception:
+        pass
+    return api_key.strip()
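For reference, the streaming loop added to py/chat.py can be exercised outside Vim with a short standalone script. This is a minimal sketch, assuming the pre-1.0 openai Python SDK that this change targets (openai.ChatCompletion was removed in openai>=1.0); the "Say hello" prompt is only illustrative, and stdout stands in for the Vim buffer appends.

# Minimal standalone sketch of the streaming pattern used in py/chat.py,
# assuming the pre-1.0 openai Python SDK. Prints each content delta as it arrives.
import os
import openai

openai.api_key = os.getenv("OPENAI_API_KEY")

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],  # illustrative prompt
    stream=True,
)

for resp in response:
    delta = resp['choices'][0]['delta']
    if 'content' in delta:
        # Outside Vim there is no buffer to append to, so write to stdout
        # instead of vim.command("normal! a" + text).
        print(delta['content'], end='', flush=True)
print()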