-
-
Notifications
You must be signed in to change notification settings - Fork 1.1k
Expand file tree
/
Copy pathfakellm.go
More file actions
57 lines (49 loc) · 1.35 KB
/
fakellm.go
File metadata and controls
57 lines (49 loc) · 1.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
// Package fake provides a fake LLM for tests: it replays a fixed list of
// canned responses instead of calling a real model backend.
package fake
import (
"context"
"errors"
"github.com/tmc/langchaingo/llms"
)
// LLM is a fake language model that serves pre-configured responses in
// order, wrapping back to the first one when the list is exhausted.
type LLM struct {
	// responses holds the canned replies, served in order.
	responses []string
	// index is the position of the next response to serve.
	index int
}
// NewFakeLLM constructs an LLM that replays the given responses in order.
func NewFakeLLM(responses []string) *LLM {
	// index starts at its zero value, so playback begins at the first response.
	llm := &LLM{responses: responses}
	return llm
}
// GenerateContent returns the next canned response as a single-choice
// content response, cycling back to the first response once all have been
// served. The context, messages, and options are ignored. It returns an
// error when no responses have been configured.
func (f *LLM) GenerateContent(_ context.Context, _ []llms.MessageContent, _ ...llms.CallOption) (*llms.ContentResponse, error) {
	if len(f.responses) == 0 {
		return nil, errors.New("no responses configured")
	}
	// Wrap around once every response has been served.
	if f.index >= len(f.responses) {
		f.index = 0
	}
	next := f.responses[f.index]
	f.index++
	choice := &llms.ContentChoice{Content: next}
	return &llms.ContentResponse{Choices: []*llms.ContentChoice{choice}}, nil
}
// Call wraps prompt in a single human message, forwards it to
// GenerateContent, and returns the content of the first choice.
func (f *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error) {
	msg := llms.MessageContent{
		Role:  llms.ChatMessageTypeHuman,
		Parts: []llms.ContentPart{llms.TextContent{Text: prompt}},
	}
	resp, err := f.GenerateContent(ctx, []llms.MessageContent{msg}, options...)
	if err != nil {
		return "", err
	}
	if len(resp.Choices) == 0 {
		return "", errors.New("empty response from model")
	}
	return resp.Choices[0].Content, nil
}
// Reset rewinds playback so the next call serves the first response again.
func (f *LLM) Reset() { f.index = 0 }
// AddResponse appends response to the set of canned replies.
func (f *LLM) AddResponse(response string) {
	updated := append(f.responses, response)
	f.responses = updated
}