Skip to content

Commit

Permalink
Merge branch 'develop-ai-limited-preview' into add-meta-llama-model-support
Browse files Browse the repository at this point in the history
  • Loading branch information
umaannamalai authored Dec 19, 2023
2 parents a0f48b0 + d74a2d7 commit 368225f
Show file tree
Hide file tree
Showing 9 changed files with 400 additions and 35 deletions.
7 changes: 7 additions & 0 deletions newrelic/hooks/external_botocore.py
Original file line number Diff line number Diff line change
Expand Up @@ -584,6 +584,12 @@ def _nr_clientcreator__create_api_method_(wrapped, instance, args, kwargs):
return tracer(wrapped)


def _nr_clientcreator__create_methods(wrapped, instance, args, kwargs):
class_attributes = wrapped(*args, **kwargs)
class_attributes["_nr_wrapped"] = True
return class_attributes


def _bind_make_request_params(operation_model, request_dict, *args, **kwargs):
return operation_model, request_dict

Expand Down Expand Up @@ -614,3 +620,4 @@ def instrument_botocore_endpoint(module):

def instrument_botocore_client(module):
    """Instrument botocore's ``ClientCreator`` entry points.

    Wraps ``_create_api_method`` (per-operation tracing) and
    ``_create_methods`` (adds the ``_nr_wrapped`` marker to generated
    class attributes).
    """
    targets = (
        ("ClientCreator._create_api_method", _nr_clientcreator__create_api_method_),
        ("ClientCreator._create_methods", _nr_clientcreator__create_methods),
    )
    for dotted_name, wrapper in targets:
        wrap_function_wrapper(module, dotted_name, wrapper)
107 changes: 77 additions & 30 deletions newrelic/hooks/mlmodel_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,19 +59,37 @@ def wrap_embedding_sync(wrapped, instance, args, kwargs):
try:
response = wrapped(*args, **kwargs)
except Exception as exc:
notice_error_attributes = {
"http.statusCode": getattr(exc, "http_status", ""),
"error.message": getattr(exc, "_message", ""),
"error.code": getattr(getattr(exc, "error", ""), "code", ""),
"error.param": getattr(exc, "param", ""),
"embedding_id": embedding_id,
}
exc._nr_message = notice_error_attributes.pop("error.message")
if OPENAI_V1:
response = getattr(exc, "response", "")
response_headers = getattr(response, "headers", "")
exc_organization = response_headers.get("openai-organization", "") if response_headers else ""
# There appears to be a bug here in openai v1 where despite having code,
# param, etc in the error response, they are not populated on the exception
# object so grab them from the response body object instead.
body = getattr(exc, "body", {}) or {}
notice_error_attributes = {
"http.statusCode": getattr(exc, "status_code", "") or "",
"error.message": body.get("message", "") or "",
"error.code": body.get("code", "") or "",
"error.param": body.get("param", "") or "",
"embedding_id": embedding_id,
}
else:
exc_organization = getattr(exc, "organization", "")
notice_error_attributes = {
"http.statusCode": getattr(exc, "http_status", ""),
"error.message": getattr(exc, "_message", ""),
"error.code": getattr(getattr(exc, "error", ""), "code", ""),
"error.param": getattr(exc, "param", ""),
"embedding_id": embedding_id,
}
message = notice_error_attributes.pop("error.message")
if message:
exc._nr_message = message
ft.notice_error(
attributes=notice_error_attributes,
)
# Gather attributes to add to embedding summary event in error context
exc_organization = getattr(exc, "organization", "")

error_embedding_dict = {
"id": embedding_id,
"appName": settings.app_name,
Expand Down Expand Up @@ -498,19 +516,37 @@ async def wrap_embedding_async(wrapped, instance, args, kwargs):
try:
response = await wrapped(*args, **kwargs)
except Exception as exc:
notice_error_attributes = {
"http.statusCode": getattr(exc, "http_status", ""),
"error.message": getattr(exc, "_message", ""),
"error.code": getattr(getattr(exc, "error", ""), "code", ""),
"error.param": getattr(exc, "param", ""),
"embedding_id": embedding_id,
}
exc._nr_message = notice_error_attributes.pop("error.message")
if OPENAI_V1:
response = getattr(exc, "response", "")
response_headers = getattr(response, "headers", "")
exc_organization = response_headers.get("openai-organization", "") if response_headers else ""
# There appears to be a bug here in openai v1 where despite having code,
# param, etc in the error response, they are not populated on the exception
# object so grab them from the response body object instead.
body = getattr(exc, "body", {}) or {}
notice_error_attributes = {
"http.statusCode": getattr(exc, "status_code", "") or "",
"error.message": body.get("message", "") or "",
"error.code": body.get("code", "") or "",
"error.param": body.get("param", "") or "",
"embedding_id": embedding_id,
}
else:
exc_organization = getattr(exc, "organization", "")
notice_error_attributes = {
"http.statusCode": getattr(exc, "http_status", ""),
"error.message": getattr(exc, "_message", ""),
"error.code": getattr(getattr(exc, "error", ""), "code", ""),
"error.param": getattr(exc, "param", ""),
"embedding_id": embedding_id,
}
message = notice_error_attributes.pop("error.message")
if message:
exc._nr_message = message
ft.notice_error(
attributes=notice_error_attributes,
)
# Gather attributes to add to embedding summary event in error context
exc_organization = getattr(exc, "organization", "")

error_embedding_dict = {
"id": embedding_id,
"appName": settings.app_name,
Expand Down Expand Up @@ -834,21 +870,33 @@ def wrap_base_client_process_response(wrapped, instance, args, kwargs):


def instrument_openai_util(module):
    """Instrument ``openai.util`` (pre-v1 SDKs only).

    ``convert_to_openai_object`` exists only in openai < 1.0, so the wrap
    is guarded with ``hasattr``; on v1 modules this is a no-op.
    """
    if not hasattr(module, "convert_to_openai_object"):
        return
    wrap_function_wrapper(module, "convert_to_openai_object", wrap_convert_to_openai_object)
    # Mark the function as instrumented so the SDK knows not to
    # instrument it again.
    module.convert_to_openai_object._nr_wrapped = True


def instrument_openai_api_resources_embedding(module):
    """Instrument ``openai.api_resources.embedding`` (pre-v1 SDKs only).

    Wraps the sync ``create`` and async ``acreate`` entry points when
    present, then flags the resource class as already instrumented.
    """
    if not hasattr(module, "Embedding"):
        return
    embedding_cls = module.Embedding
    if hasattr(embedding_cls, "create"):
        wrap_function_wrapper(module, "Embedding.create", wrap_embedding_sync)
    if hasattr(embedding_cls, "acreate"):
        wrap_function_wrapper(module, "Embedding.acreate", wrap_embedding_async)
    # Mark the class as instrumented so the SDK knows not to instrument
    # it again.
    embedding_cls._nr_wrapped = True


def instrument_openai_api_resources_chat_completion(module):
    """Instrument ``openai.api_resources.chat_completion`` (pre-v1 SDKs only).

    Wraps the sync ``create`` and async ``acreate`` entry points when
    present, then flags the resource class as already instrumented.
    """
    if not hasattr(module, "ChatCompletion"):
        return
    chat_completion_cls = module.ChatCompletion
    if hasattr(chat_completion_cls, "create"):
        wrap_function_wrapper(module, "ChatCompletion.create", wrap_chat_completion_sync)
    if hasattr(chat_completion_cls, "acreate"):
        wrap_function_wrapper(module, "ChatCompletion.acreate", wrap_chat_completion_async)
    # Mark the class as instrumented so the SDK knows not to instrument
    # it again.
    chat_completion_cls._nr_wrapped = True


def instrument_openai_resources_chat_completions(module):
Expand All @@ -858,7 +906,6 @@ def instrument_openai_resources_chat_completions(module):
wrap_function_wrapper(module, "AsyncCompletions.create", wrap_chat_completion_async)


# OpenAI v1 instrumentation points
def instrument_openai_resources_embeddings(module):
if hasattr(module, "Embeddings"):
if hasattr(module.Embeddings, "create"):
Expand Down
4 changes: 4 additions & 0 deletions tests/external_botocore/test_bedrock_chat_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,3 +288,7 @@ def _test():
exercise_model(prompt="Invalid Token", temperature=0.7, max_tokens=100)

_test()


def test_bedrock_chat_completion_functions_marked_as_wrapped_for_sdk_compatibility(bedrock_server):
assert bedrock_server._nr_wrapped
6 changes: 5 additions & 1 deletion tests/external_botocore/test_bedrock_embeddings.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright 2010 New Relic, Inc.
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -172,3 +172,7 @@ def _test():
exercise_model(prompt="Invalid Token", temperature=0.7, max_tokens=100)

_test()


def test_bedrock_chat_completion_functions_marked_as_wrapped_for_sdk_compatibility(bedrock_server):
assert bedrock_server._nr_wrapped
1 change: 0 additions & 1 deletion tests/mlmodel_openai/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,6 @@
"test_chat_completion.py",
"test_get_llm_message_ids.py",
"test_chat_completion_error.py",
"test_embeddings_error_v1.py",
]


Expand Down
5 changes: 5 additions & 0 deletions tests/mlmodel_openai/test_chat_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -371,3 +371,8 @@ def test_openai_chat_completion_async_disabled_custom_event_settings(loop):
model="gpt-3.5-turbo", messages=_test_openai_chat_completion_messages, temperature=0.7, max_tokens=100
)
)


def test_openai_chat_completion_functions_marked_as_wrapped_for_sdk_compatibility():
    """Both instrumentation points wrapped for chat completions must
    carry the ``_nr_wrapped`` marker.
    """
    for wrapped_object in (openai.ChatCompletion, openai.util.convert_to_openai_object):
        assert wrapped_object._nr_wrapped
5 changes: 5 additions & 0 deletions tests/mlmodel_openai/test_embeddings.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,3 +148,8 @@ def test_openai_embedding_async_disabled_custom_insights_events(loop):
loop.run_until_complete(
openai.Embedding.acreate(input="This is an embedding test.", model="text-embedding-ada-002")
)


def test_openai_embedding_functions_marked_as_wrapped_for_sdk_compatibility():
    """Both instrumentation points wrapped for embeddings must carry the
    ``_nr_wrapped`` marker.
    """
    for wrapped_object in (openai.Embedding, openai.util.convert_to_openai_object):
        assert wrapped_object._nr_wrapped
Loading

0 comments on commit 368225f

Please sign in to comment.