6 changes: 4 additions & 2 deletions agents/agents.go
@@ -3,15 +3,17 @@ package agents
import (
"context"

"github.com/vendasta/langchaingo/chains"
"github.com/vendasta/langchaingo/schema"
"github.com/vendasta/langchaingo/tools"
)

// Agent is the interface all agents must implement.
type Agent interface {
// Plan Given an input and previous steps decide what to do next. Returns
// either actions or a finish.
Plan(ctx context.Context, intermediateSteps []schema.AgentStep, inputs map[string]string) ([]schema.AgentAction, *schema.AgentFinish, error) //nolint:lll
// either actions or a finish. Options can be passed to configure LLM
// parameters like temperature, max tokens, etc.
Plan(ctx context.Context, intermediateSteps []schema.AgentStep, inputs map[string]string, options ...chains.ChainCallOption) ([]schema.AgentAction, *schema.AgentFinish, error) //nolint:lll
GetInputKeys() []string
GetOutputKeys() []string
GetTools() []tools.Tool
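With this change, every Agent implementation receives the per-call options that the executor forwards. A minimal sketch of a custom agent satisfying the updated interface (the echoAgent type is hypothetical and exists only to illustrate the new signature):

type echoAgent struct{}

func (a *echoAgent) Plan(
	ctx context.Context,
	intermediateSteps []schema.AgentStep,
	inputs map[string]string,
	options ...chains.ChainCallOption,
) ([]schema.AgentAction, *schema.AgentFinish, error) {
	// Options such as chains.WithTemperature or chains.WithMaxTokens arrive here
	// and can be forwarded to chains.Predict or converted with chains.GetLLMCallOptions.
	return nil, &schema.AgentFinish{ReturnValues: map[string]any{"output": inputs["input"]}}, nil
}

func (a *echoAgent) GetInputKeys() []string  { return []string{"input"} }
func (a *echoAgent) GetOutputKeys() []string { return []string{"output"} }
func (a *echoAgent) GetTools() []tools.Tool  { return nil }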
13 changes: 10 additions & 3 deletions agents/conversational.go
@@ -63,6 +63,7 @@ func (a *ConversationalAgent) Plan(
ctx context.Context,
intermediateSteps []schema.AgentStep,
inputs map[string]string,
options ...chains.ChainCallOption,
) ([]schema.AgentAction, *schema.AgentFinish, error) {
fullInputs := make(map[string]any, len(inputs))
for key, value := range inputs {
@@ -80,12 +81,18 @@ func (a *ConversationalAgent) Plan(
}
}

// Build options for chains.Predict, including user-provided options
predictOptions := []chains.ChainCallOption{
chains.WithStopWords([]string{"\nObservation:", "\n\tObservation:"}),
chains.WithStreamingFunc(stream),
}
predictOptions = append(predictOptions, options...)

output, err := chains.Predict(
ctx,
a.Chain,
fullInputs,
chains.WithStopWords([]string{"\nObservation:", "\n\tObservation:"}),
chains.WithStreamingFunc(stream),
predictOptions...,
)
if err != nil {
return nil, nil, err
@@ -144,7 +151,7 @@ func (a *ConversationalAgent) parseOutput(output string) ([]schema.AgentAction,
return nil, finishAction, nil
}

r := regexp.MustCompile(`Action: (.*?)[\n]*Action Input: (.*)`)
r := regexp.MustCompile(`Action: (.*?)[\n]*(?s)Action Input: (.*)`)
matches := r.FindStringSubmatch(output)
if len(matches) == 0 {
return nil, nil, fmt.Errorf("%w: %s", ErrUnableToParseOutput, output)
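The added (?s) flag makes the trailing capture group match across newlines, so multi-line Action Input values are no longer cut off at the first line break. A standalone sketch of the difference (not part of the diff):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	output := "Action: calculator\nAction Input: 2 +\n2"

	// Old pattern: the final (.*) stops at the first newline, capturing only "2 +".
	oldRe := regexp.MustCompile(`Action: (.*?)[\n]*Action Input: (.*)`)
	// New pattern: (?s) lets the trailing (.*) span newlines, capturing "2 +\n2".
	newRe := regexp.MustCompile(`Action: (.*?)[\n]*(?s)Action Input: (.*)`)

	fmt.Printf("%q\n", oldRe.FindStringSubmatch(output)[2]) // "2 +"
	fmt.Printf("%q\n", newRe.FindStringSubmatch(output)[2]) // "2 +\n2"
}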
7 changes: 4 additions & 3 deletions agents/executor.go
@@ -47,7 +47,7 @@ func NewExecutor(agent Agent, opts ...Option) *Executor {
}
}

func (e *Executor) Call(ctx context.Context, inputValues map[string]any, _ ...chains.ChainCallOption) (map[string]any, error) { //nolint:lll
func (e *Executor) Call(ctx context.Context, inputValues map[string]any, options ...chains.ChainCallOption) (map[string]any, error) { //nolint:lll
inputs, err := inputsToString(inputValues)
if err != nil {
return nil, err
@@ -57,7 +57,7 @@ func (e *Executor) Call(ctx context.Context, inputValues map[string]any, _ ...ch
steps := make([]schema.AgentStep, 0)
for i := 0; i < e.MaxIterations; i++ {
var finish map[string]any
steps, finish, err = e.doIteration(ctx, steps, nameToTool, inputs)
steps, finish, err = e.doIteration(ctx, steps, nameToTool, inputs, options...)
if finish != nil || err != nil {
return finish, err
}
@@ -79,8 +79,9 @@ func (e *Executor) doIteration( // nolint
steps []schema.AgentStep,
nameToTool map[string]tools.Tool,
inputs map[string]string,
options ...chains.ChainCallOption,
) ([]schema.AgentStep, map[string]any, error) {
actions, finish, err := e.Agent.Plan(ctx, steps, inputs)
actions, finish, err := e.Agent.Plan(ctx, steps, inputs, options...)
if errors.Is(err, ErrUnableToParseOutput) && e.ErrorHandler != nil {
formattedObservation := err.Error()
if e.ErrorHandler.Formatter != nil {
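Because the executor now forwards its options into Agent.Plan, LLM parameters can be tuned per call from the outside. A hedged usage sketch, assuming an already constructed agent and the existing agents.NewExecutor and chains.Call APIs:

// runWithOptions is a hypothetical helper showing option propagation end to end.
func runWithOptions(ctx context.Context, agent agents.Agent) (map[string]any, error) {
	executor := agents.NewExecutor(agent)
	return chains.Call(
		ctx,
		executor,
		map[string]any{"input": "What is 2 + 2?"},
		chains.WithTemperature(0.2), // now reaches the agent's underlying LLM call
		chains.WithMaxTokens(256),
	)
}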
1 change: 1 addition & 0 deletions agents/executor_test.go
@@ -34,6 +34,7 @@ func (a *testAgent) Plan(
_ context.Context,
intermediateSteps []schema.AgentStep,
inputs map[string]string,
_ ...chains.ChainCallOption,
) ([]schema.AgentAction, *schema.AgentFinish, error) {
a.recordedIntermediateSteps = intermediateSteps
a.recordedInputs = inputs
11 changes: 9 additions & 2 deletions agents/mrkl.go
@@ -63,6 +63,7 @@ func (a *OneShotZeroAgent) Plan(
ctx context.Context,
intermediateSteps []schema.AgentStep,
inputs map[string]string,
options ...chains.ChainCallOption,
) ([]schema.AgentAction, *schema.AgentFinish, error) {
fullInputs := make(map[string]any, len(inputs))
for key, value := range inputs {
@@ -80,12 +81,18 @@ func (a *OneShotZeroAgent) Plan(
}
}

// Build options for chains.Predict, including user-provided options
predictOptions := []chains.ChainCallOption{
chains.WithStopWords([]string{"\nObservation:", "\n\tObservation:"}),
chains.WithStreamingFunc(stream),
}
predictOptions = append(predictOptions, options...)

output, err := chains.Predict(
ctx,
a.Chain,
fullInputs,
chains.WithStopWords([]string{"\nObservation:", "\n\tObservation:"}),
chains.WithStreamingFunc(stream),
predictOptions...,
)
if err != nil {
return nil, nil, err
9 changes: 7 additions & 2 deletions agents/openai_functions_agent.go
@@ -6,6 +6,7 @@ import (
"fmt"

"github.com/vendasta/langchaingo/callbacks"
"github.com/vendasta/langchaingo/chains"
"github.com/vendasta/langchaingo/llms"
"github.com/vendasta/langchaingo/prompts"
"github.com/vendasta/langchaingo/schema"
@@ -71,6 +72,7 @@ func (o *OpenAIFunctionsAgent) Plan(
ctx context.Context,
intermediateSteps []schema.AgentStep,
inputs map[string]string,
options ...chains.ChainCallOption,
) ([]schema.AgentAction, *schema.AgentFinish, error) {
fullInputs := make(map[string]any, len(inputs))
for key, value := range inputs {
@@ -147,8 +149,11 @@ func (o *OpenAIFunctionsAgent) Plan(
mcList[i] = mc
}

result, err := o.LLM.GenerateContent(ctx, mcList,
llms.WithFunctions(o.functions()), llms.WithStreamingFunc(stream))
// Build LLM call options, including user-provided options
llmOptions := []llms.CallOption{llms.WithFunctions(o.functions()), llms.WithStreamingFunc(stream)}
llmOptions = append(llmOptions, chains.GetLLMCallOptions(options...)...)

result, err := o.LLM.GenerateContent(ctx, mcList, llmOptions...)
if err != nil {
return nil, nil, err
}
10 changes: 9 additions & 1 deletion chains/options.go
@@ -161,7 +161,10 @@ func WithCallback(callbackHandler callbacks.Handler) ChainCallOption {
}
}

func getLLMCallOptions(options ...ChainCallOption) []llms.CallOption { //nolint:cyclop
// GetLLMCallOptions converts ChainCallOption slice to llms.CallOption slice.
// This is useful for agents and other code that needs to propagate chain options
// to direct LLM calls.
func GetLLMCallOptions(options ...ChainCallOption) []llms.CallOption { //nolint:cyclop
opts := &chainCallOption{}
for _, option := range options {
option(opts)
@@ -209,3 +212,8 @@ func getLLMCallOptions(options ...ChainCallOption) []llms.CallOption { //nolint:

return chainCallOption
}

// getLLMCallOptions is a backward-compatibility wrapper for GetLLMCallOptions.
func getLLMCallOptions(options ...ChainCallOption) []llms.CallOption {
return GetLLMCallOptions(options...)
}
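Exporting GetLLMCallOptions lets code outside the chains package translate chain-level options before calling an LLM directly, which is the pattern the OpenAI functions agent above uses. A brief sketch, assuming an llms.Model is available:

// generate is a hypothetical helper: call-site defaults first, then the
// converted chain options layered on top.
func generate(ctx context.Context, model llms.Model, msgs []llms.MessageContent,
	chainOpts ...chains.ChainCallOption,
) (*llms.ContentResponse, error) {
	callOpts := []llms.CallOption{llms.WithTemperature(0.7)}
	callOpts = append(callOpts, chains.GetLLMCallOptions(chainOpts...)...)
	return model.GenerateContent(ctx, msgs, callOpts...)
}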