Skip to content

Commit 74b9fc4

Browse files
committed
refactor: foundations for chatbot (code divided into scenario selection, scenario handling, and data retrieval)
Signed-off-by: Hunter Gregory <[email protected]>
1 parent df4a302 commit 74b9fc4

File tree

17 files changed

+695
-345
lines changed

17 files changed

+695
-345
lines changed

ai/README.md

-4
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,3 @@
1515
- `$env:AOAI_COMPLETIONS_ENDPOINT = Read-Host 'Enter AOAI_COMPLETIONS_ENDPOINT'`
1616
- `$env:AOAI_DEPLOYMENT_NAME = Read-Host 'Enter AOAI_DEPLOYMENT_NAME'`
1717
- `go run main.go`
18-
19-
## Development
20-
21-
Modify prompts in the folders within *pkg/analysis/* (e.g. *pkg/analysis/flows/prompt.go* or *analyzer.go*)

ai/main.go

+4-25
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,11 @@
11
package main
22

33
import (
4-
"context"
5-
4+
"github.com/microsoft/retina/ai/pkg/chat"
65
"github.com/microsoft/retina/ai/pkg/lm"
7-
flowscenario "github.com/microsoft/retina/ai/pkg/scenarios/flows"
86

97
"github.com/sirupsen/logrus"
108
"k8s.io/client-go/kubernetes"
11-
"k8s.io/client-go/rest"
129
"k8s.io/client-go/tools/clientcmd"
1310
)
1411

@@ -43,26 +40,8 @@ func main() {
4340
}
4441
log.Info("initialized Azure OpenAI model")
4542

46-
handleChat(log, config, clientset, model)
47-
}
48-
49-
// pretend there's input from chat interface
50-
func handleChat(log logrus.FieldLogger, config *rest.Config, clientset *kubernetes.Clientset, model lm.Model) {
51-
question := "what's wrong with my app?"
52-
var chat lm.ChatHistory
53-
54-
h := flowscenario.NewHandler(log, config, clientset, model)
55-
params := &flowscenario.ScenarioParams{
56-
Scenario: flowscenario.DropScenario,
57-
Namespace1: "default",
58-
Namespace2: "default",
43+
bot := chat.NewBot(log, config, clientset, model)
44+
if err := bot.Loop(); err != nil {
45+
log.WithError(err).Fatal("error running chat loop")
5946
}
60-
61-
ctx := context.TODO()
62-
response, err := h.Handle(ctx, question, chat, params)
63-
if err != nil {
64-
log.WithError(err).Fatal("error running flows scenario")
65-
}
66-
67-
_ = response
6847
}

ai/pkg/analysis/flows/analyzer.go

-26
This file was deleted.

ai/pkg/analysis/flows/types.go

-78
This file was deleted.

ai/pkg/chat/chat.go

+91
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
package chat
2+
3+
import (
4+
"context"
5+
"fmt"
6+
7+
"github.com/microsoft/retina/ai/pkg/lm"
8+
flowretrieval "github.com/microsoft/retina/ai/pkg/retrieval/flows"
9+
"github.com/microsoft/retina/ai/pkg/scenarios"
10+
"github.com/microsoft/retina/ai/pkg/scenarios/dns"
11+
"github.com/microsoft/retina/ai/pkg/scenarios/drops"
12+
13+
"github.com/sirupsen/logrus"
14+
"k8s.io/client-go/kubernetes"
15+
"k8s.io/client-go/rest"
16+
)
17+
18+
var (
19+
definitions = []*scenarios.Definition{
20+
drops.Definition,
21+
dns.Definition,
22+
}
23+
)
24+
25+
type Bot struct {
26+
log logrus.FieldLogger
27+
config *rest.Config
28+
clientset *kubernetes.Clientset
29+
model lm.Model
30+
}
31+
32+
// input log, config, clientset, model
33+
func NewBot(log logrus.FieldLogger, config *rest.Config, clientset *kubernetes.Clientset, model lm.Model) *Bot {
34+
return &Bot{
35+
log: log.WithField("component", "chat"),
36+
config: config,
37+
clientset: clientset,
38+
model: model,
39+
}
40+
}
41+
42+
func (b *Bot) Loop() error {
43+
var history lm.ChatHistory
44+
flowRetriever := flowretrieval.NewRetriever(b.log, b.config, b.clientset)
45+
46+
for {
47+
// TODO get user input
48+
question := "what's wrong with my app?"
49+
50+
// select scenario and get parameters
51+
definition, params, err := b.selectScenario(question, history)
52+
if err != nil {
53+
return fmt.Errorf("error selecting scenario: %w", err)
54+
}
55+
56+
// cfg.FlowRetriever.UseFile()
57+
58+
cfg := &scenarios.Config{
59+
Log: b.log,
60+
Config: b.config,
61+
Clientset: b.clientset,
62+
Model: b.model,
63+
FlowRetriever: flowRetriever,
64+
}
65+
66+
ctx := context.TODO()
67+
response, err := definition.Handle(ctx, cfg, params, question, history)
68+
if err != nil {
69+
return fmt.Errorf("error handling scenario: %w", err)
70+
}
71+
72+
fmt.Println(response)
73+
74+
// TODO keep chat loop going
75+
break
76+
}
77+
78+
return nil
79+
}
80+
81+
func (b *Bot) selectScenario(question string, history lm.ChatHistory) (*scenarios.Definition, map[string]string, error) {
82+
// TODO use chat interface
83+
// FIXME hard-coding the scenario and params for now
84+
d := definitions[0]
85+
params := map[string]string{
86+
scenarios.Namespace1.Name: "default",
87+
scenarios.Namespace2.Name: "default",
88+
}
89+
90+
return d, params, nil
91+
}

ai/pkg/lm/azure-openai.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -65,11 +65,11 @@ func NewAzureOpenAI() (*AzureOpenAI, error) {
6565
return aoai, nil
6666
}
6767

68-
func (m *AzureOpenAI) Generate(ctx context.Context, systemPrompt string, chat ChatHistory, message string) (string, error) {
68+
func (m *AzureOpenAI) Generate(ctx context.Context, systemPrompt string, history ChatHistory, message string) (string, error) {
6969
messages := []azopenai.ChatRequestMessageClassification{
7070
&azopenai.ChatRequestSystemMessage{Content: to.Ptr(systemPrompt)},
7171
}
72-
for _, pair := range chat {
72+
for _, pair := range history {
7373
messages = append(messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(pair.User)})
7474
messages = append(messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(pair.Assistant)})
7575
}

ai/pkg/lm/echo.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,9 @@ func NewEchoModel() *EchoModel {
1313
return &EchoModel{}
1414
}
1515

16-
func (m *EchoModel) Generate(ctx context.Context, systemPrompt string, chat ChatHistory, message string) (string, error) {
17-
chatStrings := make([]string, 0, len(chat))
18-
for _, pair := range chat {
16+
func (m *EchoModel) Generate(ctx context.Context, systemPrompt string, history ChatHistory, message string) (string, error) {
17+
chatStrings := make([]string, 0, len(history))
18+
for _, pair := range history {
1919
chatStrings = append(chatStrings, fmt.Sprintf("USER: %s\nASSISTANT: %s\n", pair.User, pair.Assistant))
2020
}
2121
resp := fmt.Sprintf("systemPrompt: %s\nhistory: %s\nmessage: %s", systemPrompt, strings.Join(chatStrings, "\n"), message)

ai/pkg/lm/model.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,5 +10,5 @@ type MessagePair struct {
1010
type ChatHistory []MessagePair
1111

1212
type Model interface {
13-
Generate(ctx context.Context, systemPrompt string, chat ChatHistory, message string) (string, error)
13+
Generate(ctx context.Context, systemPrompt string, history ChatHistory, message string) (string, error)
1414
}

ai/pkg/analysis/flows/parser.go → ai/pkg/parse/flows/parser.go (renamed)

+8-8
Original file line numberDiff line numberDiff line change
@@ -9,19 +9,19 @@ import (
99
)
1010

1111
type Parser struct {
12-
log logrus.FieldLogger
13-
summary FlowSummary
12+
log logrus.FieldLogger
13+
connections Connections
1414
}
1515

1616
func NewParser(log logrus.FieldLogger) *Parser {
1717
return &Parser{
18-
log: log.WithField("component", "flow-parser"),
19-
summary: make(map[string]*Connection),
18+
log: log.WithField("component", "flow-parser"),
19+
connections: make(map[string]*Connection),
2020
}
2121
}
2222

23-
func (p *Parser) Summary() FlowSummary {
24-
return p.summary
23+
func (p *Parser) Connections() Connections {
24+
return p.connections
2525
}
2626

2727
func (p *Parser) Parse(flows []*flowpb.Flow) {
@@ -57,15 +57,15 @@ func (p *Parser) addFlow(f *flowpb.Flow) error {
5757
pod1, pod2 := pods[0], pods[1]
5858
key := pod1 + "#" + pod2
5959

60-
conn, exists := p.summary[key]
60+
conn, exists := p.connections[key]
6161
if !exists {
6262
conn = &Connection{
6363
Pod1: pod1,
6464
Pod2: pod2,
6565
Key: key,
6666
Flows: []*flowpb.Flow{},
6767
}
68-
p.summary[key] = conn
68+
p.connections[key] = conn
6969
}
7070

7171
conn.Flows = append(conn.Flows, f)

0 commit comments

Comments (0)