From 2546130fa5f854ab33f895e94f1ba98bb53252e4 Mon Sep 17 00:00:00 2001
From: jackmc
Date: Fri, 15 Dec 2023 03:44:24 +0000
Subject: [PATCH] feat: model intent

---
 internal/api/handler.go       | 18 +++++++++++-------
 internal/api/handler_test.go  | 37 +++++++++++++++++++++++++++++++++++
 internal/pkg/alexa/intents.go |  1 +
 3 files changed, 49 insertions(+), 7 deletions(-)

diff --git a/internal/api/handler.go b/internal/api/handler.go
index d5fd03d..078745e 100644
--- a/internal/api/handler.go
+++ b/internal/api/handler.go
@@ -31,24 +31,28 @@ func (h *Handler) randomFact(ctx context.Context) (string, error) {
 
 func (h *Handler) DispatchIntents(ctx context.Context, req alexa.Request) (res alexa.Response, err error) {
 	switch req.Body.Intent.Name {
-	case alexa.AutoCompleteIntent:
-		prompt := req.Body.Intent.Slots["prompt"].Value
-		h.Logger.With("prompt", prompt).Info("found phrase to autocomplete")
+	case alexa.ModelIntent:
+		model := req.Body.Intent.Slots["chatModel"].Value
+		h.Logger.With("model", model).Info("found model to use")
 
-		switch strings.ToLower(prompt) {
-		case "use gemini":
+		switch strings.ToLower(model) {
+		case "gemini":
 			h.Model = chatmodels.CHAT_MODEL_GEMINI
 			res = alexa.NewResponse("Autocomplete", "ok", false)
 			return
-		case "use gpt":
+		case "gpt":
 			h.Model = chatmodels.CHAT_MODEL_GPT
 			res = alexa.NewResponse("Autocomplete", "ok", false)
 			return
-		case "what model", "what model is in use":
+		default:
 			res = alexa.NewResponse("Autocomplete", fmt.Sprintf("I am using the model %s", h.Model.String()), false)
 			return
 		}
 
+	case alexa.AutoCompleteIntent:
+		prompt := req.Body.Intent.Slots["prompt"].Value
+		h.Logger.With("prompt", prompt).Info("found phrase to autocomplete")
+
 		err = h.RequestsQueue.PushMessage(ctx, &chatmodels.Request{Prompt: prompt, Model: h.Model})
 		if err != nil {
 			break
diff --git a/internal/api/handler_test.go b/internal/api/handler_test.go
index 8d618d1..cbe6b6c 100644
--- a/internal/api/handler_test.go
+++ b/internal/api/handler_test.go
@@ -278,6 +278,43 @@ func TestHelpIntent(t *testing.T) {
 	assert.False(t, resp.Body.ShouldEndSession)
 }
 
+func TestModelIntent(t *testing.T) {
+	mockChatGptService := &chatmodels.MockClient{}
+	h := &Handler{
+		ChatGptService: mockChatGptService,
+		Logger:         logger,
+		Model:          chatmodels.CHAT_MODEL_GPT,
+	}
+
+	req := alexa.Request{
+		Version: "",
+		Session: alexa.Session{},
+		Body: alexa.ReqBody{
+			Intent: alexa.Intent{
+				Name: alexa.ModelIntent,
+				Slots: map[string]alexa.Slot{
+					"chatModel": {
+						Name:        "chatModel",
+						Value:       "gpt",
+						Resolutions: alexa.Resolutions{},
+					},
+				},
+			},
+			Type: alexa.IntentRequestType,
+		},
+		Context: alexa.Context{},
+	}
+
+	resp, err := h.Invoke(context.Background(), req)
+	assert.NoError(t, err)
+	assert.EqualValues(
+		t,
+		resp.Body.OutputSpeech.Text,
+		"ok",
+	)
+	assert.False(t, resp.Body.ShouldEndSession)
+}
+
 func TestUnsupportedIntent(t *testing.T) {
 	mockChatGptService := &chatmodels.MockClient{}
 	h := &Handler{
diff --git a/internal/pkg/alexa/intents.go b/internal/pkg/alexa/intents.go
index 9e01fd0..104ca21 100644
--- a/internal/pkg/alexa/intents.go
+++ b/internal/pkg/alexa/intents.go
@@ -8,5 +8,6 @@ const (
 	FallbackIntent     = "AMAZON.FallbackIntent"
 	AutoCompleteIntent = "AutoCompleteIntent"
 	RandomFactIntent   = "RandomFactIntent"
+	ModelIntent        = "Model"
 	LastResponseIntent = "LastResponseIntent"
 )
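
The new TestModelIntent only exercises the "gpt" slot value. A minimal sketch of a table-driven variant that would also hit the "gemini" and default branches of the ModelIntent case follows; it is not part of the patch. It assumes the same test package, the package-level logger, chatmodels.MockClient, and request shape used above; TestModelIntentVariants is a hypothetical name, and because the default branch's speech depends on h.Model.String(), that case only asserts a substring.

// Sketch (not in this patch): table-driven coverage of the ModelIntent branches.
func TestModelIntentVariants(t *testing.T) {
	cases := []struct {
		name      string
		slotValue string
		wantText  string // exact expected speech; empty means assert a substring instead
	}{
		{name: "switch to gemini", slotValue: "gemini", wantText: "ok"},
		{name: "switch to gpt", slotValue: "gpt", wantText: "ok"},
		// Any other value falls through to the default branch, which reports the model in use.
		{name: "report current model", slotValue: "which model"},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			h := &Handler{
				ChatGptService: &chatmodels.MockClient{},
				Logger:         logger,
				Model:          chatmodels.CHAT_MODEL_GPT,
			}

			req := alexa.Request{
				Body: alexa.ReqBody{
					Type: alexa.IntentRequestType,
					Intent: alexa.Intent{
						Name: alexa.ModelIntent,
						Slots: map[string]alexa.Slot{
							"chatModel": {Name: "chatModel", Value: tc.slotValue},
						},
					},
				},
			}

			resp, err := h.Invoke(context.Background(), req)
			assert.NoError(t, err)
			if tc.wantText != "" {
				assert.Equal(t, tc.wantText, resp.Body.OutputSpeech.Text)
			} else {
				// Exact wording depends on h.Model.String(), so only check the prefix.
				assert.Contains(t, resp.Body.OutputSpeech.Text, "I am using the model")
			}
			assert.False(t, resp.Body.ShouldEndSession)
		})
	}
}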