Skip to content

Commit

Permalink
Added Unit Tests
Browse files Browse the repository at this point in the history
  • Loading branch information
mirackara committed Mar 1, 2024
1 parent ff9c7a9 commit db50304
Show file tree
Hide file tree
Showing 4 changed files with 261 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,10 @@ func main() {
client := nropenai.NRNewClientWithConfig(cfg)

// Add any custom attributes

client.CustomAttributes = map[string]interface{}{
client.AddCustomAttributes(map[string]interface{}{
"llm.foo": "bar",
"ll.pi": 3.14,
}
})

// GPT Request
req := openai.ChatCompletionRequest{
Expand All @@ -50,7 +49,7 @@ func main() {
},
},
}
// Create Chat Completion
// NRCreateChatCompletion returns a wrapped version of openai.ChatCompletionResponse
resp, err := nropenai.NRCreateChatCompletion(client, req, app)

if err != nil {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ func main() {
},
},
}
// Create Chat Completion
// NRCreateChatCompletion returns a wrapped version of openai.ChatCompletionResponse
resp, err := nropenai.NRCreateChatCompletion(client, req, app)

if err != nil {
Expand Down
52 changes: 31 additions & 21 deletions v3/integrations/nropenai/nropenai.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,22 @@ import (
"context"
"errors"
"reflect"
"strings"

"github.com/google/uuid"
"github.com/newrelic/go-agent/v3/newrelic"
"github.com/sashabaranov/go-openai"
)

var (
errAIMonitoringDisabled = errors.New("AI Monitoring is set to disabled or High Security Mode is enabled. Please enable AI Monitoring and ensure High Security Mode is disabled.")
errAIMonitoringDisabled = errors.New("AI Monitoring is set to disabled or High Security Mode is enabled. Please enable AI Monitoring and ensure High Security Mode is disabled")
)

// OpenAIClient is the minimal surface of the OpenAI client consumed by this
// package's wrapper functions. Both the real *openai.Client and test mocks
// satisfy it, which allows the instrumentation to be unit tested without
// making network calls.
type OpenAIClient interface {
	CreateChatCompletion(ctx context.Context, request openai.ChatCompletionRequest) (response openai.ChatCompletionResponse, err error)
	CreateEmbeddings(ctx context.Context, conv openai.EmbeddingRequestConverter) (res openai.EmbeddingResponse, err error)
}

// Wrapper for OpenAI Configuration
type ConfigWrapper struct {
Config *openai.ClientConfig
Expand All @@ -25,12 +31,32 @@ type ConfigWrapper struct {

// Wrapper for OpenAI Client with Custom Attributes that can be set for all LLM Events
type ClientWrapper struct {
Client *openai.Client
Client OpenAIClient
LicenseKeyLastFour string
// Set of Custom Attributes that get tied to all LLM Events
CustomAttributes map[string]interface{}
}

// AddCustomAttributes merges the given attributes into the wrapper's set of
// custom attributes, which are attached to every LLM event. Only keys that
// carry the "llm." prefix are accepted; all other keys are silently dropped.
func (cw *ClientWrapper) AddCustomAttributes(attributes map[string]interface{}) {
	// Lazily initialize the destination map on first use.
	if cw.CustomAttributes == nil {
		cw.CustomAttributes = make(map[string]interface{})
	}

	for k, v := range attributes {
		if !strings.HasPrefix(k, "llm.") {
			continue
		}
		cw.CustomAttributes[k] = v
	}
}

// AppendCustomAttributesToEvent copies every custom attribute stored on the
// client wrapper into the given event data map, then returns that same map.
// The input map is mutated in place.
func AppendCustomAttributesToEvent(cw *ClientWrapper, data map[string]interface{}) map[string]interface{} {
	for key, value := range cw.CustomAttributes {
		data[key] = value
	}
	return data
}

// Wrapper for ChatCompletionResponse that is returned from NRCreateChatCompletion. It also includes the TraceID of the transaction for linking a chat response with its feedback
type ChatCompletionResponseWrapper struct {
ChatCompletionResponse openai.ChatCompletionResponse
Expand Down Expand Up @@ -77,21 +103,6 @@ func NRNewClientWithConfig(config *ConfigWrapper) *ClientWrapper {
}
}

func AddCustomAttributes(cw *ClientWrapper, attributes map[string]interface{}) {
for key, value := range attributes {
if len(key) >= 4 && key[:4] == "llm." {
cw.CustomAttributes[key] = value
}
}
}

func AppendCustomAttributesToEvent(cw *ClientWrapper, data map[string]interface{}) map[string]interface{} {
for k, v := range cw.CustomAttributes {
data[k] = v
}
return data
}

func NRCreateChatCompletionSummary(txn *newrelic.Transaction, app *newrelic.Application, cw *ClientWrapper, req openai.ChatCompletionRequest) ChatCompletionResponseWrapper {
// Get App Config for setting App Name Attribute
appConfig, configErr := app.Config()
Expand Down Expand Up @@ -129,13 +140,16 @@ func NRCreateChatCompletionSummary(txn *newrelic.Transaction, app *newrelic.Appl
})
}

// ratelimitLimitTokensUsageBased, ratelimitResetTokensUsageBased, and ratelimitRemainingTokensUsageBased are not in the response
// Request Headers
ChatCompletionSummaryData["request.temperature"] = req.Temperature
ChatCompletionSummaryData["request.max_tokens"] = req.MaxTokens
ChatCompletionSummaryData["request.model"] = req.Model
ChatCompletionSummaryData["model"] = req.Model

// Response Data
ChatCompletionSummaryData["response.model"] = resp.Model
ChatCompletionSummaryData["request_id"] = resp.ID
ChatCompletionSummaryData["response.organization"] = resp.Header().Get("Openai-Organization")
ChatCompletionSummaryData["response.number_of_messages"] = len(resp.Choices)
ChatCompletionSummaryData["response.usage.total_tokens"] = resp.Usage.TotalTokens
Expand All @@ -153,19 +167,15 @@ func NRCreateChatCompletionSummary(txn *newrelic.Transaction, app *newrelic.Appl
ChatCompletionSummaryData["response.headers.ratelimitRemainingTokens"] = resp.Header().Get("X-Ratelimit-Remaining-Tokens")
ChatCompletionSummaryData["response.headers.ratelimitRemainingRequests"] = resp.Header().Get("X-Ratelimit-Remaining-Requests")

// ratelimitLimitTokensUsageBased, ratelimitResetTokensUsageBased, and ratelimitRemainingTokensUsageBased are not in the response

// New Relic Attributes
ChatCompletionSummaryData["id"] = uuid.String()
ChatCompletionSummaryData["request_id"] = resp.ID
ChatCompletionSummaryData["span_id"] = spanID
ChatCompletionSummaryData["transaction_id"] = transactionID
ChatCompletionSummaryData["trace_id"] = traceID
ChatCompletionSummaryData["api_key_last_four_digits"] = cw.LicenseKeyLastFour
ChatCompletionSummaryData["vendor"] = "OpenAI"
ChatCompletionSummaryData["ingest_source"] = "Go"
ChatCompletionSummaryData["appName"] = appConfig.AppName
ChatCompletionSummaryData["model"] = req.Model

// Record any custom attributes if they exist
ChatCompletionSummaryData = AppendCustomAttributesToEvent(cw, ChatCompletionSummaryData)
Expand Down
226 changes: 226 additions & 0 deletions v3/integrations/nropenai/nropenai_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,226 @@
package nropenai

import (
"context"
"net/http"
"testing"

"github.com/newrelic/go-agent/v3/internal"
"github.com/newrelic/go-agent/v3/internal/integrationsupport"
"github.com/newrelic/go-agent/v3/newrelic"
"github.com/sashabaranov/go-openai"
)

// MockOpenAIClient is a test double that satisfies the OpenAIClient
// interface, letting tests run the instrumentation without network calls.
type MockOpenAIClient struct {
	// Canned chat completion response. NOTE(review): the current
	// CreateChatCompletion mock builds its own fixed response and does not
	// read this field.
	MockCreateChatCompletionResp openai.ChatCompletionResponse
	// Canned response returned by CreateEmbeddings.
	MockCreateEmbeddingsResp openai.EmbeddingResponse
	// Error returned by both mocked calls.
	MockCreateChatCompletionErr error
}

// CreateChatCompletion is the MockOpenAIClient implementation of the
// OpenAIClient interface. It ignores the request and returns a fixed
// response carrying realistic usage numbers and OpenAI rate-limit headers,
// plus whatever error the mock was configured with.
func (m *MockOpenAIClient) CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
	mockResponse := openai.ChatCompletionResponse{
		ID:                "chatcmpl-123",
		Object:            "chat.completion",
		Created:           1677652288,
		Model:             openai.GPT3Dot5Turbo,
		SystemFingerprint: "fp_44709d6fcb",
		Usage: openai.Usage{
			PromptTokens:     9,
			CompletionTokens: 12,
			TotalTokens:      21,
		},
		Choices: []openai.ChatCompletionChoice{
			{
				Index: 0,
				Message: openai.ChatCompletionMessage{
					Role:    openai.ChatMessageRoleAssistant,
					Content: "\n\nHello there, how may I assist you today?",
				},
			},
		},
	}

	// Simulated OpenAI response headers (Header.Add canonicalizes key case,
	// so insertion order and casing here do not matter).
	hdrs := http.Header{}
	for name, value := range map[string]string{
		"X-Request-Id":                   "chatcmpl-123",
		"ratelimit-limit-tokens":         "100",
		"Openai-Version":                 "2020-10-01",
		"X-Ratelimit-Limit-Requests":     "10000",
		"X-Ratelimit-Limit-Tokens":       "100",
		"X-Ratelimit-Reset-Tokens":       "100",
		"X-Ratelimit-Reset-Requests":     "10000",
		"X-Ratelimit-Remaining-Tokens":   "100",
		"X-Ratelimit-Remaining-Requests": "10000",
		"Openai-Organization":            "user-123",
	} {
		hdrs.Add(name, value)
	}
	mockResponse.SetHeader(hdrs)

	return mockResponse, m.MockCreateChatCompletionErr
}

// CreateEmbeddings is the MockOpenAIClient implementation of the
// OpenAIClient interface; it returns the configured canned embeddings
// response. Note it reuses the chat-completion error field as its error.
func (m *MockOpenAIClient) CreateEmbeddings(ctx context.Context, conv openai.EmbeddingRequestConverter) (res openai.EmbeddingResponse, err error) {
	return m.MockCreateEmbeddingsResp, m.MockCreateChatCompletionErr
}

// TestFormatAPIKey verifies that FormatAPIKey reduces a full secret key to
// its "sk-" prefix plus the final four characters.
func TestFormatAPIKey(t *testing.T) {
	const want = "sk-mnop"
	got := FormatAPIKey("sk-12345678900abcdefghijklmnop")
	if got != want {
		t.Errorf("Formatted API key is incorrect: expected: %s actual: %s", want, got)
	}
}
// TestDefaultConfig checks the values populated by NRDefaultConfig: the
// stored license-key suffix plus the defaults filled in by the openai package.
func TestDefaultConfig(t *testing.T) {
	cfg := NRDefaultConfig("sk-12345678900abcdefghijklmnop")

	if got := cfg.LicenseKeyLastFour; got != "sk-mnop" {
		t.Errorf("API Key is incorrect: expected: %s actual: %s", "sk-mnop", got)
	}
	if got := cfg.Config.OrgID; got != "" {
		t.Errorf("OrgID is incorrect: expected: %s actual: %s", "", got)
	}
	// The API type default comes from the openai package itself.
	if got := cfg.Config.APIType; got != openai.APITypeOpenAI {
		t.Errorf("API Type is incorrect: expected: %s actual: %s", openai.APITypeOpenAI, got)
	}
}

// TestDefaultConfigAzure checks NRDefaultAzureConfig: the license-key suffix,
// the caller-supplied base URL, and the Azure API type set by openai.
func TestDefaultConfigAzure(t *testing.T) {
	const baseURL = "https://azure-base-url.com"
	cfg := NRDefaultAzureConfig("sk-12345678900abcdefghijklmnop", baseURL)

	if got := cfg.LicenseKeyLastFour; got != "sk-mnop" {
		t.Errorf("API Key is incorrect: expected: %s actual: %s", "sk-mnop", got)
	}
	if got := cfg.Config.BaseURL; got != baseURL {
		t.Errorf("baseURL is incorrect: expected: %s actual: %s", baseURL, got)
	}
	// The Azure API type default comes from the openai package itself.
	if got := cfg.Config.APIType; got != openai.APITypeAzure {
		t.Errorf("API Type is incorrect: expected: %s actual: %s", openai.APITypeAzure, got)
	}
}

// TestNRNewClient confirms that the client wrapper returned by NRNewClient
// records the formatted license-key suffix.
func TestNRNewClient(t *testing.T) {
	wrapped := NRNewClient("sk-12345678900abcdefghijklmnop")
	if got := wrapped.LicenseKeyLastFour; got != "sk-mnop" {
		t.Errorf("API Key is incorrect: expected: %s actual: %s", "sk-mnop", got)
	}
}

func TestNRNewClientWithConfigs(t *testing.T) {
// Regular Config
dummyAPIKey := "sk-12345678900abcdefghijklmnop"
cfg := NRDefaultConfig(dummyAPIKey)
client := NRNewClientWithConfig(cfg)
if client.LicenseKeyLastFour != "sk-mnop" {
t.Errorf("API Key is incorrect: expected: %s actual: %s", "sk-mnop", client.LicenseKeyLastFour)
}
// Azure Config
baseURL := "https://azure-base-url.com"
azureCfg := NRDefaultAzureConfig(dummyAPIKey, baseURL)
azureClient := NRNewClientWithConfig(azureCfg)
if azureClient.LicenseKeyLastFour != "sk-mnop" {
t.Errorf("API Key is incorrect: expected: %s actual: %s", "sk-mnop", azureClient.LicenseKeyLastFour)
}
if azureCfg.Config.BaseURL != baseURL {
t.Errorf("baseURL is incorrect: expected: %s actual: %s", baseURL, azureCfg.Config.BaseURL)
}
// Default Value set by openai package
if azureCfg.Config.APIType != openai.APITypeAzure {
t.Errorf("API Type is incorrect: expected: %s actual: %s", openai.APITypeAzure, azureCfg.Config.APIType)
}
}

// TestAddCustomAttributes verifies that AddCustomAttributes keeps only keys
// with the "llm." prefix and silently drops everything else.
//
// The previous version of this test made the call but asserted nothing, so
// it could never fail; it now checks the resulting attribute set.
func TestAddCustomAttributes(t *testing.T) {
	client := NRNewClient("sk-12345678900abcdefghijklmnop")
	client.AddCustomAttributes(map[string]interface{}{
		"llm.foo": "bar",
		"ll.pi":   3.14,
	})
	// Only "llm.foo" carries the required prefix, so exactly one attribute
	// should have been retained.
	if len(client.CustomAttributes) != 1 {
		t.Errorf("Custom attributes length is incorrect: expected: %d actual: %d", 1, len(client.CustomAttributes))
	}
	if got := client.CustomAttributes["llm.foo"]; got != "bar" {
		t.Errorf("Custom attribute value is incorrect: expected: %s actual: %v", "bar", got)
	}
	if _, ok := client.CustomAttributes["ll.pi"]; ok {
		t.Error("Custom attribute without the llm. prefix should have been dropped")
	}
}

// TestNRCreateChatCompletion runs a chat completion request through
// NRCreateChatCompletion with a mocked OpenAI client and verifies both the
// returned response content and the LLM custom events recorded on the
// New Relic test application.
func TestNRCreateChatCompletion(t *testing.T) {
	// Wrap the mock client so no real network calls are made.
	mockClient := &MockOpenAIClient{}
	cw := &ClientWrapper{
		Client:             mockClient,
		LicenseKeyLastFour: "sk-mnop",
	}
	req := openai.ChatCompletionRequest{
		Model:       openai.GPT3Dot5Turbo,
		Temperature: 0,
		MaxTokens:   150,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    openai.ChatMessageRoleUser,
				Content: "What is 8*5",
			},
		},
	}
	// AI Monitoring must be enabled for the LLM events to be recorded.
	app := integrationsupport.NewTestApp(nil, newrelic.ConfigAIMonitoringEnabled(true))
	resp, err := NRCreateChatCompletion(cw, req, app.Application)
	if err != nil {
		t.Error(err)
	}
	// The mock always returns this fixed assistant message.
	if resp.ChatCompletionResponse.Choices[0].Message.Content != "\n\nHello there, how may I assist you today?" {
		t.Errorf("Chat completion response is incorrect: expected: %s actual: %s", "\n\nHello there, how may I assist you today?", resp.ChatCompletionResponse.Choices[0].Message.Content)
	}
	// Expect one LlmChatCompletionSummary event for the request plus one
	// LlmChatCompletionMessage event for the single response choice. The
	// attribute values mirror the canned response and headers set by
	// MockOpenAIClient.CreateChatCompletion above.
	app.ExpectCustomEvents(t, []internal.WantEvent{
		{
			Intrinsics: map[string]interface{}{
				"type":      "LlmChatCompletionSummary",
				"timestamp": internal.MatchAnything,
			},
			UserAttributes: map[string]interface{}{
				"ingest_source":            "Go",
				"vendor":                   "OpenAI",
				"model":                    "gpt-3.5-turbo",
				"id":                       internal.MatchAnything,
				"transaction_id":           internal.MatchAnything,
				"trace_id":                 internal.MatchAnything,
				"span_id":                  internal.MatchAnything,
				"appName":                  "my app",
				"request.temperature":      0,
				"api_key_last_four_digits": "sk-mnop",
				"request_id":               "chatcmpl-123",
				"request.model":            "gpt-3.5-turbo",
				"request.max_tokens":       150,
				"response.number_of_messages":                 1,
				"response.headers.llmVersion":                 "2020-10-01",
				"response.organization":                       "user-123",
				"response.usage.completion_tokens":            12,
				"response.model":                              "gpt-3.5-turbo",
				"response.usage.total_tokens":                 21,
				"response.usage.prompt_tokens":                9,
				"response.headers.ratelimitRemainingTokens":   "100",
				"response.headers.ratelimitRemainingRequests": "10000",
				"response.headers.ratelimitResetTokens":       "100",
				"response.headers.ratelimitResetRequests":     "10000",
				"response.headers.ratelimitLimitTokens":       "100",
				"response.headers.ratelimitLimitRequests":     "10000",
			},
		},
		{
			Intrinsics: map[string]interface{}{
				"type":      "LlmChatCompletionMessage",
				"timestamp": internal.MatchAnything,
			},
			// NOTE(review): vendor/ingest_source are lowercase here but
			// capitalized on the summary event above — confirm this
			// inconsistency is intended.
			UserAttributes: map[string]interface{}{
				"trace_id":       internal.MatchAnything,
				"transaction_id": internal.MatchAnything,
				"span_id":        internal.MatchAnything,
				"id":             "chatcmpl-123",
				"sequence":       0,
				"role":           "assistant",
				"content":        "\n\nHello there, how may I assist you today?",
				"request_id":     "chatcmpl-123",
				"vendor":         "openai",
				"ingest_source":  "go",
				"response.model": "gpt-3.5-turbo",
			},
		},
	})

}

0 comments on commit db50304

Please sign in to comment.