Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -195,8 +195,12 @@ require("neoai").setup({
cutoff_width = 75,
},
prompts = {
default_prompt = function()
return "Please only follow instructions or answer questions. Be concise."
end,
context_prompt = function(context)
return "Hey, I'd like to provide some context for future "
return "Please only follow instructions or answer questions. Be concise. "
.. "I'd like to provide some context for future "
.. "messages. Here is the code/text that I want to refer "
.. "to in our upcoming conversations:\n\n"
.. context
Expand Down
2 changes: 2 additions & 0 deletions lua/neoai/chat/history.lua
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ function ChatHistory:new(model, params, context)
if context ~= nil then
local context_prompt = config.options.prompts.context_prompt(context)
self:set_prompt(context_prompt)
else
self:set_prompt(config.options.prompts.default_prompt())
end
return obj
end
Expand Down
8 changes: 6 additions & 2 deletions lua/neoai/chat/models/openai.lua
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
local utils = require("neoai.utils")
local config = require("neoai.config")

--- This model definition supports OpenAI - and compatible APIs. The
--- configuration defaults to OpenAI

---@type ModelModule
local M = {}

M.name = "OpenAI"
M.name = config.options.open_ai.display_name

M._chunks = {}
local raw_chunks = {}
Expand Down Expand Up @@ -53,6 +56,7 @@ end
---@param on_complete fun(err?: string, output?: string) Function to call when model has finished
M.send_to_model = function(chat_history, on_stdout_chunk, on_complete)
local api_key = config.options.open_ai.api_key.get()
local url = config.options.open_ai.url

local data = {
model = chat_history.model,
Expand All @@ -67,7 +71,7 @@ M.send_to_model = function(chat_history, on_stdout_chunk, on_complete)
"--silent",
"--show-error",
"--no-buffer",
"https://api.openai.com/v1/chat/completions",
url,
"-H",
"Content-Type: application/json",
"-H",
Expand Down
15 changes: 11 additions & 4 deletions lua/neoai/config.lua
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ M.get_defaults = function()
ui = {
output_popup_text = "NeoAI",
input_popup_text = "Prompt",
width = 30, -- As percentage eg. 30%
width = 30, -- As percentage eg. 30%
output_popup_height = 80, -- As percentage eg. 80%
submit = "<Enter>",
},
Expand All @@ -34,17 +34,22 @@ M.get_defaults = function()
},
prompts = {
context_prompt = function(context)
return "Hey, I'd like to provide some context for future "
return "Please only follow instructions or answer questions. Be concise. "
.. "I'd like to provide some context for future "
.. "messages. Here is the code/text that I want to refer "
.. "to in our upcoming conversations (TEXT/CODE ONLY):\n\n"
.. "to in our upcoming conversations:\n\n"
.. context
end,
default_prompt = function()
return "Please only follow instructions or answer questions. Be concise."
end,
},
mappings = {
["select_up"] = "<C-k>",
["select_down"] = "<C-j>",
},
open_ai = {
display_name = "OpenAI",
api_key = {
env = "OPENAI_API_KEY",
value = nil,
Expand Down Expand Up @@ -72,6 +77,7 @@ M.get_defaults = function()
error(msg)
end,
},
url = "https://api.openai.com/v1/chat/completions",
},
shortcuts = {
{
Expand Down Expand Up @@ -122,7 +128,8 @@ end
---@field cutoff_width integer | nil When injecting if the text becomes longer than this then it should go to a new line, if nil then ignore

---@class Prompt_Options
---@field context_prompt fun(context: string) string Prompt to generate the prompt that should be used when using Context modes
---@field context_prompt fun(context: string): string Prompt to generate the prompt that should be used when using Context modes
---@field default_prompt fun(): string Function that generates the prompt used when no context is provided

---@class Shortcut
---@field name string The name of the shortcut, can trigger using :NeoAIShortcut <name>
Expand Down