-rw-r--r--  README.md            |  42
-rw-r--r--  autoload/vim_ai.vim  |   6
-rw-r--r--  doc/tags             |   2
-rw-r--r--  doc/vim-ai.txt       |  12
-rw-r--r--  plugin/vim-ai.vim    |  98
-rw-r--r--  py/chat.py           |  48
-rw-r--r--  py/complete.py       |   4
-rw-r--r--  py/utils.py          |   8
8 files changed, 167 insertions, 53 deletions
diff --git a/README.md b/README.md
index dc997a3..f92377c 100644
--- a/README.md
+++ b/README.md
@@ -49,7 +49,6 @@ Plug 'madox2/vim-ai', { 'do': './install.sh' }
### :AIChat
-
`:AIChat` - continue or start a new conversation.
`(visual selection)? :AIChat {instruction}?` - start a new conversation given the selection, the instruction or both
@@ -76,6 +75,13 @@ You are a Clean Code expert, I have the following code, please refactor it in a
Supported chat roles are **`>>> system`**, **`>>> user`** and **`<<< assistant`**
+### :AIRedo
+
+`:AIRedo` - repeat last AI command
+
+Use this immediately after `AI`/`AIEdit`/`AIChat` command in order to re-try or get an alternative completion.
+Note that the randomness of responses heavily depends on the [`temperature`](https://platform.openai.com/docs/api-reference/completions/create#completions/create-temperature) parameter.
+
## Configuration
### Key bindings
@@ -94,6 +100,40 @@ nnoremap <leader>s :AIEdit fix grammar and spelling<CR>
" trigger chat
xnoremap <leader>c :AIChat<CR>
nnoremap <leader>c :AIChat<CR>
+
+" redo last AI command
+nnoremap <leader>r :AIRedo<CR>
+```
+
+
+### Interface configuration
+
+Default interface configuration:
+
+```vim
+let g:vim_ai_chat = {
+\ "ui": {
+\ "open_chat_command": "below new | call vim_ai#MakeScratchWindow()",
+\ },
+\}
+```
+
+Tips:
+
+```vim
+" restore conversation from the file
+let g:vim_ai_chat = {
+\ "ui": {
+\ "open_chat_command": "below new /tmp/last_conversation.aichat",
+\ },
+\}
+
+" open chat in a new tab
+let g:vim_ai_chat = {
+\ "ui": {
+\ "open_chat_command": "tabnew | call vim_ai#MakeScratchWindow()",
+\ },
+\}
```
### Completion configuration
diff --git a/autoload/vim_ai.vim b/autoload/vim_ai.vim
new file mode 100644
index 0000000..4957c5b
--- /dev/null
+++ b/autoload/vim_ai.vim
@@ -0,0 +1,6 @@
+function! vim_ai#MakeScratchWindow()
+ setlocal buftype=nofile
+ setlocal bufhidden=hide
+ setlocal noswapfile
+ setlocal ft=aichat
+endfunction
diff --git a/doc/tags b/doc/tags
index 20ddc90..469a9be 100644
--- a/doc/tags
+++ b/doc/tags
@@ -1,7 +1,9 @@
:AI vim-ai.txt /*:AI*
:AIChat vim-ai.txt /*:AIChat*
:AIEdit vim-ai.txt /*:AIEdit*
+:AIRedo vim-ai.txt /*:AIRedo*
vim-ai vim-ai.txt /*vim-ai*
vim-ai-about vim-ai.txt /*vim-ai-about*
vim-ai-commands vim-ai.txt /*vim-ai-commands*
+vim-ai-config vim-ai.txt /*vim-ai-config*
vim-ai.txt vim-ai.txt /*vim-ai.txt*
diff --git a/doc/vim-ai.txt b/doc/vim-ai.txt
index 24a1ebe..d9c3eeb 100644
--- a/doc/vim-ai.txt
+++ b/doc/vim-ai.txt
@@ -54,7 +54,6 @@ https://platform.openai.com/docs/api-reference/completions
*:AIChat*
-
:AIChat continue or start a new conversation.
(selection)? :AIChat {instruction}? start a new conversation given the selection,
the instruction or both
@@ -67,11 +66,19 @@ Options: >
\ "temperature": 1,
\ "request_timeout": 10,
\ },
+ \ "ui": {
+ \ "open_chat_command": "below new | call vim_ai#MakeScratchWindow()",
+ \ },
\}
Check OpenAI docs for more information:
https://platform.openai.com/docs/api-reference/chat
+ *:AIRedo*
+
+:AIRedo repeat last AI command in order to re-try
+ or get an alternative completion.
+
CONFIGURATION *vim-ai-config*
To customize the default configuration, initialize the config variable with
@@ -100,6 +107,9 @@ Examples how to configure key bindings and customize commands: >
xnoremap <leader>c :AIChat<CR>
nnoremap <leader>c :AIChat<CR>
+ " redo last AI command
+ nnoremap <leader>r :AIRedo<CR>
+
" command with custom context (vim-ai functions: AIRun, AIEditRun, AIChatRun)
command! -range -nargs=? AICode <line1>,<line2>call AIRun(<range>, "Programming syntax is " . &filetype . ", " . <f-args>)
diff --git a/plugin/vim-ai.vim b/plugin/vim-ai.vim
index 0672597..b117598 100644
--- a/plugin/vim-ai.vim
+++ b/plugin/vim-ai.vim
@@ -36,29 +36,41 @@ let g:vim_ai_chat_default = {
\ "request_timeout": 20,
\ "initial_prompt": s:vim_ai_chat_initial_prompt,
\ },
+\ "ui": {
+\ "open_chat_command": "below new | call vim_ai#MakeScratchWindow()"
+\ },
\}
-if !exists('g:vim_ai_complete')
- let g:vim_ai_complete = {"options":{}}
-endif
-if !exists('g:vim_ai_edit')
- let g:vim_ai_edit = {"options":{}}
-endif
-if !exists('g:vim_ai_chat')
- let g:vim_ai_chat = {"options":{}}
-endif
+function! s:ExtendDeep(defaults, override) abort
+ let l:result = a:defaults
+ for [l:key, l:value] in items(a:override)
+ if type(get(l:result, l:key)) is v:t_dict && type(l:value) is v:t_dict
+ call s:ExtendDeep(l:result[l:key], l:value)
+ else
+ let l:result[l:key] = l:value
+ endif
+ endfor
+ return l:result
+endfun
+
+function! s:MakeConfig(config_name) abort
+ let l:defaults = copy(g:[a:config_name . "_default"])
+ let l:override = exists("g:" . a:config_name) ? g:[a:config_name] : {}
+ let g:[a:config_name] = s:ExtendDeep(l:defaults, l:override)
+endfun
+
+call s:MakeConfig("vim_ai_chat")
+call s:MakeConfig("vim_ai_complete")
+call s:MakeConfig("vim_ai_edit")
let s:plugin_root = expand('<sfile>:p:h:h')
let s:complete_py = s:plugin_root . "/py/complete.py"
let s:chat_py = s:plugin_root . "/py/chat.py"
-function! ScratchWindow()
- below new
- setlocal buftype=nofile
- setlocal bufhidden=hide
- setlocal noswapfile
- setlocal ft=aichat
-endfunction
+" remembers last command parameters to be used in AIRedoRun
+let s:last_is_selection = 0
+let s:last_instruction = ""
+let s:last_command = ""
function! MakePrompt(is_selection, lines, instruction)
let lines = trim(join(a:lines, "\n"))
@@ -70,9 +82,14 @@ function! MakePrompt(is_selection, lines, instruction)
endfunction
function! AIRun(is_selection, ...) range
+ let instruction = a:0 ? a:1 : ""
let lines = getline(a:firstline, a:lastline)
- let prompt = MakePrompt(a:is_selection, lines, a:0 ? a:1 : "")
- let options_default = g:vim_ai_complete_default['options']
+ let prompt = MakePrompt(a:is_selection, lines, instruction)
+
+ let s:last_command = "complete"
+ let s:last_instruction = instruction
+ let s:last_is_selection = a:is_selection
+
let options = g:vim_ai_complete['options']
let cursor_on_empty_line = trim(join(lines, "\n")) == ""
set paste
@@ -87,8 +104,13 @@ function! AIRun(is_selection, ...) range
endfunction
function! AIEditRun(is_selection, ...) range
- let prompt = MakePrompt(a:is_selection, getline(a:firstline, a:lastline), a:0 ? a:1 : "")
- let options_default = g:vim_ai_edit_default['options']
+ let instruction = a:0 ? a:1 : ""
+ let prompt = MakePrompt(a:is_selection, getline(a:firstline, a:lastline), instruction)
+
+ let s:last_command = "edit"
+ let s:last_instruction = instruction
+ let s:last_is_selection = a:is_selection
+
let options = g:vim_ai_edit['options']
set paste
execute "normal! " . a:firstline . "GV" . a:lastline . "Gc"
@@ -97,24 +119,52 @@ function! AIEditRun(is_selection, ...) range
endfunction
function! AIChatRun(is_selection, ...) range
+ let instruction = ""
let lines = getline(a:firstline, a:lastline)
set paste
let is_outside_of_chat_window = search('^>>> user$', 'nw') == 0
if is_outside_of_chat_window
- call ScratchWindow()
+ " open chat window
+ execute g:vim_ai_chat['ui']['open_chat_command']
let prompt = ""
if a:0 || a:is_selection
- let prompt = MakePrompt(a:is_selection, lines, a:0 ? a:1 : "")
+ let instruction = a:0 ? a:1 : ""
+ let prompt = MakePrompt(a:is_selection, lines, instruction)
endif
- execute "normal i>>> user\n\n" . prompt
+ execute "normal! Gi" . prompt
endif
- let options_default = g:vim_ai_chat_default['options']
+ let s:last_command = "chat"
+ let s:last_instruction = instruction
+ let s:last_is_selection = a:is_selection
+
let options = g:vim_ai_chat['options']
execute "py3file " . s:chat_py
set nopaste
endfunction
+function! AIRedoRun()
+ execute "normal! u"
+ if s:last_command == "complete"
+ if s:last_is_selection
+ '<,'>call AIRun(s:last_is_selection, s:last_instruction)
+ else
+ call AIRun(s:last_is_selection, s:last_instruction)
+ endif
+ endif
+ if s:last_command == "edit"
+ if s:last_is_selection
+ '<,'>call AIEditRun(s:last_is_selection, s:last_instruction)
+ else
+ call AIEditRun(s:last_is_selection, s:last_instruction)
+ endif
+ endif
+ if s:last_command == "chat"
+ call AIChatRun(s:last_is_selection, s:last_instruction)
+ endif
+endfunction
+
command! -range -nargs=? AI <line1>,<line2>call AIRun(<range>, <f-args>)
command! -range -nargs=? AIEdit <line1>,<line2>call AIEditRun(<range>, <f-args>)
command! -range -nargs=? AIChat <line1>,<line2>call AIChatRun(<range>, <f-args>)
+command! AIRedo call AIRedoRun()
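
Note on the config handling above: `s:MakeConfig` deep-merges a user-defined `g:vim_ai_*` dictionary into the shipped defaults via `s:ExtendDeep`, so a partial override (for example only `ui.open_chat_command`) keeps every other default. A minimal Python sketch of the same merge semantics, for illustration only (the plugin does this in Vim script; the values below are taken from the defaults shown in this commit):

```python
# Illustrative Python equivalent of s:ExtendDeep's recursive merge.
def extend_deep(defaults, override):
    """Recursively merge `override` into `defaults` (mutates and returns defaults)."""
    for key, value in override.items():
        if isinstance(defaults.get(key), dict) and isinstance(value, dict):
            extend_deep(defaults[key], value)
        else:
            defaults[key] = value
    return defaults

defaults = {
    "options": {"temperature": 1, "request_timeout": 10},
    "ui": {"open_chat_command": "below new | call vim_ai#MakeScratchWindow()"},
}
override = {"ui": {"open_chat_command": "tabnew | call vim_ai#MakeScratchWindow()"}}

merged = extend_deep(defaults, override)
# merged["options"] keeps both defaults; only ui.open_chat_command is replaced.
```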
diff --git a/py/chat.py b/py/chat.py
index 6b7c2e5..9656bf2 100644
--- a/py/chat.py
+++ b/py/chat.py
@@ -4,7 +4,7 @@ import openai
plugin_root = vim.eval("s:plugin_root")
vim.command(f"py3file {plugin_root}/py/utils.py")
-options = make_options()
+options = vim.eval("options")
request_options = make_request_options()
openai.api_key = load_api_key()
@@ -16,26 +16,38 @@ prompt = f"{initial_prompt}\n{file_content}"
lines = prompt.splitlines()
messages = []
-with open('/tmp/prompt.aichat', 'w') as f:
- f.write(prompt)
+def parse_messages():
+ file_content = vim.eval('trim(join(getline(1, "$"), "\n"))')
+ lines = file_content.splitlines()
+ messages = []
+ for line in lines:
+ if line.startswith(">>> system"):
+ messages.append({"role": "system", "content": ""})
+ continue
+ if line.startswith(">>> user"):
+ messages.append({"role": "user", "content": ""})
+ continue
+ if line.startswith("<<< assistant"):
+ messages.append({"role": "assistant", "content": ""})
+ continue
+ if not messages:
+ continue
+ messages[-1]["content"] += "\n" + line
+ return messages
-for line in lines:
- if line.startswith(">>> system"):
- messages.append({"role": "system", "content": ""})
- continue
- if line.startswith(">>> user"):
- messages.append({"role": "user", "content": ""})
- continue
- if line.startswith("<<< assistant"):
- messages.append({"role": "assistant", "content": ""})
- continue
- if not messages:
- continue
- messages[-1]["content"] += "\n" + line
+messages = parse_messages()
if not messages:
- file_content = ">>> user\n\n" + file_content
- messages.append({"role": "user", "content": file_content })
+    # roles not found, put whole file content as a user prompt
+ vim.command("normal! ggO>>> user\n")
+ vim.command("normal! G")
+ vim.command("let &ul=&ul") # breaks undo sequence (https://vi.stackexchange.com/a/29087)
+ vim.command("redraw")
+ messages = parse_messages()
+
+for message in messages:
+    # strip newlines from the content as they cause empty responses
+ message["content"] = message["content"].strip()
try:
if messages[-1]["content"].strip():
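
Note on `parse_messages()` above: the chat buffer is split on the `>>> system`, `>>> user` and `<<< assistant` role markers, and every following line is appended to the current message. A standalone Python sketch of that parsing, using a hypothetical `parse()` helper that takes the buffer text as a string instead of reading it through `vim.eval`:

```python
# Standalone sketch of the role-marker parsing; `parse` is a hypothetical
# string-based variant of parse_messages() (the real one reads the buffer via vim.eval).
def parse(file_content):
    messages = []
    for line in file_content.splitlines():
        if line.startswith(">>> system"):
            messages.append({"role": "system", "content": ""})
            continue
        if line.startswith(">>> user"):
            messages.append({"role": "user", "content": ""})
            continue
        if line.startswith("<<< assistant"):
            messages.append({"role": "assistant", "content": ""})
            continue
        if not messages:
            continue  # ignore text above the first role marker
        messages[-1]["content"] += "\n" + line
    return messages

chat = ">>> user\n\nHello\n\n<<< assistant\n\nHi, how can I help?"
for m in parse(chat):
    print(m["role"], repr(m["content"].strip()))
# user 'Hello'
# assistant 'Hi, how can I help?'
```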
diff --git a/py/complete.py b/py/complete.py
index 40f6efb..bddd8e6 100644
--- a/py/complete.py
+++ b/py/complete.py
@@ -4,13 +4,13 @@ import openai
plugin_root = vim.eval("s:plugin_root")
vim.command(f"py3file {plugin_root}/py/utils.py")
-prompt = vim.eval("prompt")
+prompt = vim.eval("prompt").strip()
request_options = make_request_options()
openai.api_key = load_api_key()
try:
- if prompt.strip():
+ if prompt:
print('Completing...')
vim.command("redraw")
diff --git a/py/utils.py b/py/utils.py
index 3e5295a..02ecfef 100644
--- a/py/utils.py
+++ b/py/utils.py
@@ -11,14 +11,8 @@ def load_api_key():
pass
return api_key.strip()
-def make_options():
- options_default = vim.eval("options_default")
- options_user = vim.eval("options")
- options = {**options_default, **options_user}
- return options
-
def make_request_options():
- options = make_options()
+ options = vim.eval("options")
request_options = {}
request_options['model'] = options['model']
request_options['max_tokens'] = int(options['max_tokens'])
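
Note on `make_request_options()` above: `vim.eval("options")` returns a dictionary of strings, so numeric options have to be coerced before they are passed to the OpenAI client. The diff is cut off after `max_tokens`; the sketch below is a hedged illustration of that pattern, and the `temperature`/`request_timeout` fields are assumptions based on the option names shown earlier in this commit:

```python
# Hedged sketch of turning vim options into OpenAI request kwargs; runs only
# inside Vim's embedded Python (:py3), where the `vim` module is available.
import vim

def make_request_options_sketch():
    options = vim.eval("options")  # all values arrive as strings
    return {
        "model": options["model"],
        "max_tokens": int(options["max_tokens"]),
        # the two fields below are assumptions, not shown in the truncated diff
        "temperature": float(options["temperature"]),
        "request_timeout": float(options["request_timeout"]),
    }
```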