Skip to content

Commit

Permalink
feat: ai-prompt-template plugin (apache#11517)
Browse files Browse the repository at this point in the history
  • Loading branch information
shreemaan-abhishek authored Aug 29, 2024
1 parent 9c81c93 commit e775640
Show file tree
Hide file tree
Showing 7 changed files with 655 additions and 0 deletions.
1 change: 1 addition & 0 deletions apisix/cli/config.lua
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,7 @@ local _M = {
"authz-keycloak",
"proxy-cache",
"body-transformer",
"ai-prompt-template",
"proxy-mirror",
"proxy-rewrite",
"workflow",
Expand Down
146 changes: 146 additions & 0 deletions apisix/plugins/ai-prompt-template.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements. See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
local core = require("apisix.core")
local body_transformer = require("apisix.plugins.body-transformer")
local ipairs = ipairs

-- JSON schema for a single chat message inside a template.
local prompt_schema = {
    properties = {
        role = {
            type = "string",
            -- OpenAI-style chat roles
            enum = { "system", "user", "assistant" }
        },
        content = {
            type = "string",
            minLength = 1,
        }
    },
    required = { "role", "content" }
}

-- A template's "messages" field: a non-empty array of chat messages.
local prompts = {
    type = "array",
    minItems = 1,
    items = prompt_schema
}

-- Plugin configuration schema: a non-empty array of named templates.
-- Each template holds the target model name and the chat messages
-- (whose content may contain template variables rendered at request time).
-- NOTE(review): "model" and "messages" are not listed in a "required"
-- array inside "template" — confirm whether omitting them is intentional.
local schema = {
    type = "object",
    properties = {
        templates = {
            type = "array",
            minItems = 1,
            items = {
                type = "object",
                properties = {
                    name = {
                        type = "string",
                        minLength = 1,
                    },
                    template = {
                        type = "object",
                        properties = {
                            model = {
                                type = "string",
                                minLength = 1,
                            },
                            messages = prompts
                        }
                    }
                },
                required = {"name", "template"}
            }
        },
    },
    required = {"templates"},
}


-- Plugin metadata registered with APISIX. Priority 1060 places this
-- plugin after body-transformer (priority 1080).
local _M = {
    version = 0.1,
    priority = 1060,
    name = "ai-prompt-template",
    schema = schema,
}

-- Cache of template-name -> template table lookups (see find_template),
-- versioned by the conf table identity.
local templates_lrucache = core.lrucache.new({
    ttl = 300, count = 256
})

-- Cache of template table -> JSON-encoded string, keyed by table identity.
local templates_json_lrucache = core.lrucache.new({
    ttl = 300, count = 256
})

-- Validate the plugin configuration against the schema above.
function _M.check_schema(conf)
    local ok, err = core.schema.check(schema, conf)
    return ok, err
end


-- Read and JSON-decode the client request body.
-- Returns the decoded table on success, or nil plus a table with a
-- human-readable "message" field (suitable as an HTTP error response body).
local function get_request_body_table()
    local body, err = core.request.get_body()
    if not body then
        -- err may be nil (e.g. an absent body); guard the concatenation
        return nil, { message = "could not get body: " .. (err or "request body is empty") }
    end

    local body_tab, decode_err = core.json.decode(body)
    if not body_tab then
        -- BUGFIX: the decode error was previously a stray positional table
        -- element ({ message = "...", err }) and never reached the client;
        -- the message text was also garbled ("could not get parse ...").
        return nil, { message = "could not parse JSON request body: " .. (decode_err or "unknown error") }
    end

    return body_tab
end


-- Linear scan over the configured templates; returns the template
-- definition whose entry matches template_name, or nil when no
-- configured template carries that name.
local function find_template(conf, template_name)
    local entries = conf.templates
    for i = 1, #entries do
        local entry = entries[i]
        if entry.name == template_name then
            return entry.template
        end
    end
    return nil
end

-- Rewrite phase: look up the template named by the "template_name"
-- field of the JSON request body and delegate to body_transformer,
-- which renders the template against the remaining body fields.
function _M.rewrite(conf, ctx)
    local req_body, err = get_request_body_table()
    if not req_body then
        return 400, err
    end

    local wanted = req_body.template_name
    if not wanted then
        return 400, { message = "template name is missing in request." }
    end

    -- per-name lookup, cached with the conf table as the version key
    local template = templates_lrucache(wanted, conf, find_template, conf, wanted)
    if not template then
        return 400, { message = "template: " .. wanted .. " not configured." }
    end

    -- JSON-encoded form of the template, cached by table identity
    local template_json = templates_json_lrucache(template, template, core.json.encode, template)
    core.log.info("sending template to body_transformer: ", template_json)

    local bt_conf = {
        request = {
            template = template_json,
            input_format = "json"
        }
    }
    return body_transformer.rewrite(bt_conf, ctx)
end


return _M
1 change: 1 addition & 0 deletions conf/config.yaml.example
Original file line number Diff line number Diff line change
Expand Up @@ -476,6 +476,7 @@ plugins: # plugin list (sorted by priority)
#- error-log-logger # priority: 1091
- proxy-cache # priority: 1085
- body-transformer # priority: 1080
- ai-prompt-template # priority: 1060
- proxy-mirror # priority: 1010
- proxy-rewrite # priority: 1008
- workflow # priority: 1006
Expand Down
1 change: 1 addition & 0 deletions docs/en/latest/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@
"plugins/proxy-rewrite",
"plugins/grpc-transcode",
"plugins/grpc-web",
"plugins/ai-prompt-template",
"plugins/fault-injection",
"plugins/mocking",
"plugins/degraphql",
Expand Down
102 changes: 102 additions & 0 deletions docs/en/latest/plugins/ai-prompt-template.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
---
title: ai-prompt-template
keywords:
- Apache APISIX
- API Gateway
- Plugin
- ai-prompt-template
description: This document contains information about the Apache APISIX ai-prompt-template Plugin.
---

<!--
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
-->

## Description

The `ai-prompt-template` plugin simplifies access to LLM providers, such as OpenAI and Anthropic, and their models by predefining the request format
using a template, which only allows users to pass customized values into template variables.

## Plugin Attributes

| **Field** | **Required** | **Type** | **Description** |
| ------------------------------------- | ------------ | -------- | --------------------------------------------------------------------------------------------------------------------------- |
| `templates` | Yes | Array | An array of template objects |
| `templates.name` | Yes | String | Name of the template. |
| `templates.template.model`            | Yes          | String   | Name of the AI model, for example `gpt-4` or `gpt-3.5`. See your LLM provider API documentation for more available models. |
| `templates.template.messages.role` | Yes | String | Role of the message (`system`, `user`, `assistant`) |
| `templates.template.messages.content` | Yes | String | Content of the message. |

## Example usage

Create a route with the `ai-prompt-template` plugin like so:

```shell
curl "http://127.0.0.1:9180/apisix/admin/routes/1" -X PUT \
-H "X-API-KEY: ${ADMIN_API_KEY}" \
-d '{
"uri": "/v1/chat/completions",
"upstream": {
"type": "roundrobin",
"nodes": {
"api.openai.com:443": 1
},
"scheme": "https",
"pass_host": "node"
},
"plugins": {
"ai-prompt-template": {
"templates": [
{
"name": "level of detail",
"template": {
"model": "gpt-4",
"messages": [
{
"role": "user",
"content": "Explain about {{ topic }} in {{ level }}."
}
]
}
}
]
}
}
}'
```

Now send a request:

```shell
curl http://127.0.0.1:9080/v1/chat/completions -i -XPOST -H 'Content-Type: application/json' -d '{
"template_name": "level of detail",
"topic": "psychology",
"level": "brief"
}' -H "Authorization: Bearer <your token here>"
```

Then the request body will be modified to something like this:

```json
{
"model": "some model",
"messages": [
{ "role": "user", "content": "Explain about psychology in brief." }
]
}
```
1 change: 1 addition & 0 deletions t/admin/plugins.t
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ opa
authz-keycloak
proxy-cache
body-transformer
ai-prompt-template
proxy-mirror
proxy-rewrite
workflow
Expand Down
Loading

0 comments on commit e775640

Please sign in to comment.