Mirror of https://github.com/open-telemetry/opentelemetry-python-contrib.git, synced 2025-08-02 02:52:18 +08:00

Add Support for Async openai instrumentation (#2984)

.github/component_owners.yml (1 change)
@@ -73,4 +73,5 @@ components:
       - lzchen
       - gyliu513
       - nirga
       - alizenhom
+      - codefromthecrypt
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

- Support for `AsyncOpenAI/AsyncCompletions` ([#2984](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/2984))

## Version 2.0b0 (2024-11-08)

- Use generic `OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT` environment variable
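For orientation, a minimal sketch of how the new async path is exercised once this change lands. It assumes the `opentelemetry-instrumentation-openai-v2` package is installed and `OPENAI_API_KEY` is set; it is illustrative, not taken from the repository docs.

# Sketch only: instrument once, then use AsyncOpenAI as usual.
import asyncio

from openai import AsyncOpenAI
from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor

# Patches both Completions.create and AsyncCompletions.create (see below).
OpenAIInstrumentor().instrument()


async def main():
    client = AsyncOpenAI()
    response = await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Say this is a test"}],
    )
    print(response.choices[0].message.content)


asyncio.run(main())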
@@ -52,7 +52,7 @@ from opentelemetry.instrumentation.utils import unwrap
 from opentelemetry.semconv.schemas import Schemas
 from opentelemetry.trace import get_tracer

-from .patch import chat_completions_create
+from .patch import async_chat_completions_create, chat_completions_create


 class OpenAIInstrumentor(BaseInstrumentor):
@@ -84,7 +84,16 @@ class OpenAIInstrumentor(BaseInstrumentor):
            ),
        )

        wrap_function_wrapper(
            module="openai.resources.chat.completions",
            name="AsyncCompletions.create",
            wrapper=async_chat_completions_create(
                tracer, event_logger, is_content_enabled()
            ),
        )

    def _uninstrument(self, **kwargs):
        import openai  # pylint: disable=import-outside-toplevel

        unwrap(openai.resources.chat.completions.Completions, "create")
        unwrap(openai.resources.chat.completions.AsyncCompletions, "create")
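The async wrapper is installed through the same `wrapt` mechanism as the sync one; the only requirement is that the wrapper itself be a coroutine so it can await the original method. A standalone sketch of that pattern, using a hypothetical `Client.fetch` coroutine (not part of the openai package):

# Self-contained wrapt sketch; Client.fetch is hypothetical.
import asyncio

from wrapt import wrap_function_wrapper


class Client:
    # Hypothetical coroutine method used only to illustrate the pattern.
    async def fetch(self, value):
        await asyncio.sleep(0)
        return value * 2


def make_async_wrapper():
    async def traced_method(wrapped, instance, args, kwargs):
        # The wrapper is itself a coroutine, so it can await the wrapped call.
        print("before call")
        result = await wrapped(*args, **kwargs)
        print("after call")
        return result

    return traced_method


wrap_function_wrapper(__name__, "Client.fetch", make_async_wrapper())
print(asyncio.run(Client().fetch(21)))  # prints 42 between the trace lines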
@@ -21,15 +21,12 @@ from opentelemetry._events import Event, EventLogger
 from opentelemetry.semconv._incubating.attributes import (
     gen_ai_attributes as GenAIAttributes,
 )
-from opentelemetry.semconv.attributes import (
-    error_attributes as ErrorAttributes,
-)
 from opentelemetry.trace import Span, SpanKind, Tracer
-from opentelemetry.trace.status import Status, StatusCode

 from .utils import (
     choice_to_event,
     get_llm_request_attributes,
+    handle_span_exception,
     is_streaming,
     message_to_event,
     set_span_attribute,
@@ -72,12 +69,49 @@ def chat_completions_create(
                return result

            except Exception as error:
-               span.set_status(Status(StatusCode.ERROR, str(error)))
-               if span.is_recording():
-                   span.set_attribute(
-                       ErrorAttributes.ERROR_TYPE, type(error).__qualname__
-                   )
-               span.end()
+               handle_span_exception(span, error)
                raise

    return traced_method


def async_chat_completions_create(
    tracer: Tracer, event_logger: EventLogger, capture_content: bool
):
    """Wrap the `create` method of the `AsyncChatCompletion` class to trace it."""

    async def traced_method(wrapped, instance, args, kwargs):
        span_attributes = {**get_llm_request_attributes(kwargs, instance)}

        span_name = f"{span_attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]} {span_attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]}"
        with tracer.start_as_current_span(
            name=span_name,
            kind=SpanKind.CLIENT,
            attributes=span_attributes,
            end_on_exit=False,
        ) as span:
            if span.is_recording():
                for message in kwargs.get("messages", []):
                    event_logger.emit(
                        message_to_event(message, capture_content)
                    )

            try:
                result = await wrapped(*args, **kwargs)
                if is_streaming(kwargs):
                    return StreamWrapper(
                        result, span, event_logger, capture_content
                    )

                if span.is_recording():
                    _set_response_attributes(
                        span, result, event_logger, capture_content
                    )
                span.end()
                return result

            except Exception as error:
                handle_span_exception(span, error)
                raise

    return traced_method
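One detail worth calling out: the span is started with `end_on_exit=False` because, for streaming requests, it must stay open after `traced_method` returns and is closed later by the stream wrapper. A minimal, illustrative sketch of that pattern:

# Pattern used above: the context manager activates the span but does not end
# it; whoever finishes consuming the result calls span.end() explicitly.
from opentelemetry import trace

tracer = trace.get_tracer("example")

with tracer.start_as_current_span("chat gpt-4o-mini", end_on_exit=False) as span:
    pass  # hand the still-open span to a wrapper around the streaming result

# ... once the stream is exhausted (or fails):
span.end()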
@@ -286,10 +320,19 @@ class StreamWrapper:
    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is not None:
-               self.span.set_status(Status(StatusCode.ERROR, str(exc_val)))
-               self.span.set_attribute(
-                   ErrorAttributes.ERROR_TYPE, exc_type.__qualname__
-               )
+               handle_span_exception(self.span, exc_val)
        finally:
            self.cleanup()
        return False  # Propagate the exception

    async def __aenter__(self):
        self.setup()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is not None:
                handle_span_exception(self.span, exc_val)
        finally:
            self.cleanup()
        return False  # Propagate the exception
@@ -301,6 +344,9 @@ class StreamWrapper:
    def __iter__(self):
        return self

    def __aiter__(self):
        return self

    def __next__(self):
        try:
            chunk = next(self.stream)
@@ -310,10 +356,20 @@ class StreamWrapper:
            self.cleanup()
            raise
        except Exception as error:
-           self.span.set_status(Status(StatusCode.ERROR, str(error)))
-           self.span.set_attribute(
-               ErrorAttributes.ERROR_TYPE, type(error).__qualname__
-           )
+           handle_span_exception(self.span, error)
            self.cleanup()
            raise

    async def __anext__(self):
        try:
            chunk = await self.stream.__anext__()
            self.process_chunk(chunk)
            return chunk
        except StopAsyncIteration:
            self.cleanup()
            raise
        except Exception as error:
            handle_span_exception(self.span, error)
            self.cleanup()
            raise
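With `__aiter__`/`__anext__` in place, an instrumented streaming call can be consumed with `async for`, and the span is closed by `cleanup()` once the stream is exhausted. A usage sketch, assuming the instrumentor from above is active and an API key is configured:

# Sketch: consuming an instrumented async streaming completion.
import asyncio

from openai import AsyncOpenAI


async def main():
    client = AsyncOpenAI()
    stream = await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Say this is a test"}],
        stream=True,
    )
    # Each chunk passes through StreamWrapper.__anext__ / process_chunk;
    # the span ends when StopAsyncIteration triggers cleanup().
    async for chunk in stream:
        delta = chunk.choices[0].delta.content if chunk.choices else None
        if delta:
            print(delta, end="")


asyncio.run(main())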
@@ -26,6 +26,10 @@ from opentelemetry.semconv._incubating.attributes import (
 from opentelemetry.semconv._incubating.attributes import (
     server_attributes as ServerAttributes,
 )
+from opentelemetry.semconv.attributes import (
+    error_attributes as ErrorAttributes,
+)
+from opentelemetry.trace.status import Status, StatusCode

 OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT = (
     "OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"
@@ -138,9 +142,11 @@ def choice_to_event(choice, capture_content):

    if choice.message:
        message = {
-           "role": choice.message.role
-           if choice.message and choice.message.role
-           else None
+           "role": (
+               choice.message.role
+               if choice.message and choice.message.role
+               else None
+           )
        }
        tool_calls = extract_tool_calls(choice.message, capture_content)
        if tool_calls:
@@ -210,3 +216,12 @@ def get_llm_request_attributes(

    # filter out None values
    return {k: v for k, v in attributes.items() if v is not None}


def handle_span_exception(span, error):
    span.set_status(Status(StatusCode.ERROR, str(error)))
    if span.is_recording():
        span.set_attribute(
            ErrorAttributes.ERROR_TYPE, type(error).__qualname__
        )
    span.end()
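As a reminder of how the content-capture switch referenced above is driven, the environment variable named by the constant can be set before instrumenting; the snippet below is illustrative, not from the repository.

# Opt in to recording prompt/response content on events before instrumenting.
import os

os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "true"

from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor

OpenAIInstrumentor().instrument()  # is_content_enabled() now returns True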
@@ -5,6 +5,7 @@ importlib-metadata==6.11.0
 packaging==24.0
 pytest==7.4.4
 pytest-vcr==1.0.2
+pytest-asyncio==0.21.0
 wrapt==1.16.0
 opentelemetry-api==1.28 # when updating, also update in pyproject.toml
 opentelemetry-sdk==1.28 # when updating, also update in pyproject.toml

@@ -5,6 +5,7 @@ importlib-metadata==6.11.0
 packaging==24.0
 pytest==7.4.4
 pytest-vcr==1.0.2
+pytest-asyncio==0.21.0
 wrapt==1.16.0
 # test with the latest version of opentelemetry-api, sdk, and semantic conventions
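The pinned `pytest-asyncio` makes the new async tests runnable, and the recorded cassettes below are replayed by `pytest-vcr`. A hypothetical shape of such a test (fixture names are assumptions, not necessarily the repository's actual fixtures):

# Hypothetical async test shape using the recorded cassettes.
import pytest


@pytest.mark.vcr()
@pytest.mark.asyncio
async def test_async_chat_completion(async_openai_client, span_exporter):
    response = await async_openai_client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Say this is a test"}],
    )

    assert response.choices[0].message.content
    spans = span_exporter.get_finished_spans()
    assert spans[-1].name == "chat gpt-4o-mini"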
@@ -0,0 +1,89 @@
interactions:
- request:
    body: |-
      {
        "messages": [
          {
            "role": "user",
            "content": "Say this is a test"
          }
        ],
        "model": "this-model-does-not-exist"
      }
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      authorization:
      - Bearer test_openai_api_key
      connection:
      - keep-alive
      content-length:
      - '103'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - AsyncOpenAI/Python 1.26.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - async:asyncio
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.26.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.5
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: |-
        {
          "error": {
            "message": "The model `this-model-does-not-exist` does not exist or you do not have access to it.",
            "type": "invalid_request_error",
            "param": null,
            "code": "model_not_found"
          }
        }
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8e1a80827a861852-MRS
      Connection:
      - keep-alive
      Content-Type:
      - application/json; charset=utf-8
      Date:
      - Wed, 13 Nov 2024 00:04:01 GMT
      Server:
      - cloudflare
      Set-Cookie: test_set_cookie
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      alt-svc:
      - h3=":443"; ma=86400
      content-length:
      - '231'
      openai-organization: test_openai_org_id
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      vary:
      - Origin
      x-request-id:
      - req_5cf06a7fabd45ebe21ee38c14c5b2f76
    status:
      code: 404
      message: Not Found
version: 1
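The cassette above replays a 404 `model_not_found` response, which exercises the async error path (`handle_span_exception`). A hypothetical test against it might look like this (fixture names are assumptions):

# Hypothetical error-path test; fixture names are illustrative.
import pytest
from openai import NotFoundError


@pytest.mark.vcr()
@pytest.mark.asyncio
async def test_async_chat_completion_bad_model(async_openai_client, span_exporter):
    with pytest.raises(NotFoundError):
        await async_openai_client.chat.completions.create(
            model="this-model-does-not-exist",
            messages=[{"role": "user", "content": "Say this is a test"}],
        )

    (span,) = span_exporter.get_finished_spans()
    assert span.attributes["error.type"] == "NotFoundError"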
@ -0,0 +1,137 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Say this is a test"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"max_tokens": 50,
|
||||
"seed": 42,
|
||||
"stream": false,
|
||||
"temperature": 0.5,
|
||||
"service_tier": "default"
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '183'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9WMTAMZY4O1EImv3csZa6Ch7KI",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456242,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "This is a test. How can I assist you further?",
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 12,
|
||||
"completion_tokens": 12,
|
||||
"total_tokens": 24,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_0ba0d124f1"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a8088f867e167-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:02 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '825'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '488'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999943'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_6df08d6267415e8f5db3628a6757edad
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
@ -0,0 +1,143 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Say this is a test"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"n": 2,
|
||||
"stream": false
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '114'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9XLlMmT7H3cf50dNTesHDBDwX5",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456243,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "This is a test.",
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
},
|
||||
{
|
||||
"index": 1,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "This is a test.",
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 12,
|
||||
"completion_tokens": 10,
|
||||
"total_tokens": 22,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"system_fingerprint": "fp_0ba0d124f1"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a808f6d8e0d8b-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:04 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '970'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '306'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999962'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_1317908e0f9b73276b57d4e171c533ea
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
@ -0,0 +1,382 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"n": 2,
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
}
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '254'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |+
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"I'm"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"I'm"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" unable"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" unable"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" provide"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" provide"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" real"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" real"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" updates"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" updates"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" as"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" as"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" my"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" my"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" knowledge"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" knowledge"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" was"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" only"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" last"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" extends"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" until"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" updated"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" in"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" October"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" October"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" "},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"202"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"202"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"3"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"1"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" and"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" and"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" don't"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" have"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" access"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" don't"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" have"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" access"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" live"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" live"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" data"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" data"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" However"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" However"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" easily"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" easily"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" check"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" check"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" the"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" the"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" current"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" current"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" in"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" for"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" Seattle"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" Seattle"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" and"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" and"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" San"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" San"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" Francisco"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" by"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" using"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" visiting"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" website"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" website"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" or"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" or"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" using"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" mobile"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" app"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" for"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" app"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" the"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" most"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" If"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" accurate"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" and"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" need"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" up"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" historical"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" weather"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" information"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" or"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" general"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" climate"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" data"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" for"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" those"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" cities"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" feel"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" free"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":" ask"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"-to"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"-date"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":" information"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":1,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9hB9He94oQyZr1CDC8coqvmn5U","object":"chat.completion.chunk","created":1731456253,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[],"usage":{"prompt_tokens":26,"completion_tokens":133,"total_tokens":159,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
|
||||
|
||||
data: [DONE]
|
||||
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80ceac3ce19a-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:13 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '126'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999945'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_5dd8b6845db59fa55cf226eda1f5a2c6
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
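
The chunks above interleave two choice indexes (0 and 1) and end with a choices-empty chunk that carries only token usage. As a rough, hypothetical sketch of the kind of call that yields this shape (the recorded request body is not shown here, so the prompt and client setup below are placeholders, not the fixture's actual values):

import asyncio

from openai import AsyncOpenAI


async def main():
    client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set in the environment
    stream = await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "placeholder prompt"}],  # not the recorded prompt
        n=2,  # two choices, matching the interleaved index 0/1 chunks above
        stream=True,
        stream_options={"include_usage": True},  # final chunk carries token usage
    )
    async for chunk in stream:
        for choice in chunk.choices:
            print(choice.index, choice.delta.content or "", sep=": ")
        if chunk.usage is not None:
            print("usage:", chunk.usage.total_tokens)


asyncio.run(main())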
|
@ -0,0 +1,164 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tool_choice": "auto",
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"description": "Get the current weather in a given location",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string",
|
||||
"description": "The city and state, e.g. Boston, MA"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"location"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '602'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |+
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_hqkL24CLEwnniv4GDrjk14Iu","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n\": \"S"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"eatt"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"le, W"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_0s1enkFttXjIR7ozHoGMcnUu","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"n\": \"S"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"an F"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"ranci"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"sco, C"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9l0RKJrq2fTx2dK5jhJoIr4rMI","object":"chat.completion.chunk","created":1731456257,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[],"usage":{"prompt_tokens":75,"completion_tokens":51,"total_tokens":126,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
|
||||
|
||||
data: [DONE]
|
||||
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80e4cfb00d86-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:19 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '1597'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999960'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_487aef2347cb4d1f97077c488dd93628
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
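
The interaction above streams tool-call deltas: each tool call's id and function name arrive once, and the JSON arguments arrive as small fragments that have to be concatenated per tool-call index. A minimal sketch, assuming an already-configured AsyncOpenAI client, of issuing the recorded request and reassembling the arguments from the deltas:

from openai import AsyncOpenAI

weather_tool = {
    "type": "function",
    "function": {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. Boston, MA",
                }
            },
            "required": ["location"],
            "additionalProperties": False,
        },
    },
}


async def stream_tool_calls(client: AsyncOpenAI):
    stream = await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": "You're a helpful assistant."},
            {
                "role": "user",
                "content": "What's the weather in Seattle and San Francisco today?",
            },
        ],
        tools=[weather_tool],
        tool_choice="auto",
        stream=True,
        stream_options={"include_usage": True},
    )
    # Concatenate argument fragments per tool-call index.
    calls = {}
    async for chunk in stream:
        for choice in chunk.choices:
            for tool_call in choice.delta.tool_calls or []:
                entry = calls.setdefault(
                    tool_call.index, {"name": None, "arguments": ""}
                )
                if tool_call.function.name:
                    entry["name"] = tool_call.function.name
                entry["arguments"] += tool_call.function.arguments or ""
    return calls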
|
@ -0,0 +1,164 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tool_choice": "auto",
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"description": "Get the current weather in a given location",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string",
|
||||
"description": "The city and state, e.g. Boston, MA"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"location"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '602'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |+
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_oJL2dc4GjWVxqBtWlGLwjbsR","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n\": \"S"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"eatt"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"le, W"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_ON3lp1OWsbw2obNRD43KVDp6","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"n\": \"S"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"an F"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"ranci"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"sco, C"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","usage":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"A\"}"}}]},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9jfgm5JguNGaR9o9u94HpuhV7T","object":"chat.completion.chunk","created":1731456255,"model":"gpt-4o-mini-2024-07-18","system_fingerprint":"fp_0ba0d124f1","choices":[],"usage":{"prompt_tokens":75,"completion_tokens":51,"total_tokens":126,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
|
||||
|
||||
data: [DONE]
|
||||
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80d8efb9e1c8-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:16 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '1162'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999960'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_0b6729aef347cecd61ba3b7b7a8d4719
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
@ -0,0 +1,117 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Say this is a test"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4",
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
}
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '142'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |+
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"This"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" is"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" test"},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9ejXDUtAhGOJJxWuw026zdinc4","object":"chat.completion.chunk","created":1731456250,"model":"gpt-4-0613","system_fingerprint":null,"choices":[],"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
|
||||
|
||||
data: [DONE]
|
||||
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80bd2f31e1e5-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:11 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '196'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '1000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '999977'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 1ms
|
||||
x-request-id:
|
||||
- req_cc9204ae23338b130df11c8c5b5f31af
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
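
With "stream_options": {"include_usage": true}, the server appends a final chunk whose choices list is empty and whose usage field carries the token counts, as recorded above. A minimal sketch, assuming an already-configured AsyncOpenAI client, of collecting both the streamed text and that trailing usage chunk:

from openai import AsyncOpenAI


async def stream_with_usage(client: AsyncOpenAI):
    stream = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test"}],
        stream=True,
        stream_options={"include_usage": True},
    )
    text_parts, usage = [], None
    async for chunk in stream:
        if chunk.choices:
            text_parts.append(chunk.choices[0].delta.content or "")
        if chunk.usage is not None:  # only the final, choices-empty chunk has usage
            usage = chunk.usage
    return "".join(text_parts), usage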
|
@ -0,0 +1,112 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Say this is a test"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4",
|
||||
"stream": true
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '99'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |+
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"This"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" test"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
data: {"id":"chatcmpl-ASv9gROIIAvRs9QnmLP8Nzs3PGMCX","object":"chat.completion.chunk","created":1731456252,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
data: [DONE]
|
||||
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80c54d00e288-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:12 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '283'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '1000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '999977'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 1ms
|
||||
x-request-id:
|
||||
- req_e9e4ea6fd060391e8cc8cfea78ad9a15
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
@ -0,0 +1,342 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"tool_choice": "auto",
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"description": "Get the current weather in a given location",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string",
|
||||
"description": "The city and state, e.g. Boston, MA"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"location"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '543'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9bJqWatpvCC0YMsYRcTSIiXoxk",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456247,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": "call_vwOezSsB5j9ei1SSMlZjqx7g",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"arguments": "{\"location\": \"Seattle, WA\"}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "call_LzeIYcKhHnVF60u4LmBpT1tv",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"arguments": "{\"location\": \"San Francisco, CA\"}"
|
||||
}
|
||||
}
|
||||
],
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 75,
|
||||
"completion_tokens": 51,
|
||||
"total_tokens": 126,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"system_fingerprint": "fp_0ba0d124f1"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80a9f8fbe1c9-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:08 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '1308'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '808'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999960'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_f1b9b75e4a73b542c9b1b992cd52c66f
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": "call_vwOezSsB5j9ei1SSMlZjqx7g",
|
||||
"function": {
|
||||
"arguments": "{\"location\": \"Seattle, WA\"}",
|
||||
"name": "get_current_weather"
|
||||
},
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"id": "call_LzeIYcKhHnVF60u4LmBpT1tv",
|
||||
"function": {
|
||||
"arguments": "{\"location\": \"San Francisco, CA\"}",
|
||||
"name": "get_current_weather"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"content": "50 degrees and raining",
|
||||
"tool_call_id": "call_vwOezSsB5j9ei1SSMlZjqx7g"
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"content": "70 degrees and sunny",
|
||||
"tool_call_id": "call_LzeIYcKhHnVF60u4LmBpT1tv"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini"
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '746'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- test_cookie
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9dfXfIwGCZgeWzDTbCh0FuU9kh",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456249,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "Today, the weather in Seattle is 50 degrees and raining, while in San Francisco, it's 70 degrees and sunny.",
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 99,
|
||||
"completion_tokens": 25,
|
||||
"total_tokens": 124,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"system_fingerprint": "fp_0ba0d124f1"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80b3baade1c9-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:10 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '859'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '972'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999948'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_754e6b59f1d3da727e2210e3d8c56243
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
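
The cassette above records the full tool-call roundtrip as two requests: the first response finishes with tool_calls, the tool outputs are appended as role "tool" messages keyed by tool_call_id, and the second request returns the final answer. A minimal sketch of that flow with an AsyncOpenAI client; the weather strings are stand-ins mirroring the recorded fixture, not real lookups:

from openai import AsyncOpenAI


async def weather_roundtrip(client: AsyncOpenAI, messages: list, tools: list):
    # First request: the model responds with tool_calls for each city.
    first = await client.chat.completions.create(
        model="gpt-4o-mini", messages=messages, tools=tools, tool_choice="auto"
    )
    assistant = first.choices[0].message
    messages.append(
        {"role": "assistant", "tool_calls": assistant.to_dict()["tool_calls"]}
    )

    # Stand-in tool outputs, one per returned tool_call_id.
    stub_results = ["50 degrees and raining", "70 degrees and sunny"]
    for tool_call, result in zip(assistant.tool_calls, stub_results):
        messages.append(
            {"role": "tool", "content": result, "tool_call_id": tool_call.id}
        )

    # Second request: the model turns the tool outputs into the final answer.
    second = await client.chat.completions.create(
        model="gpt-4o-mini", messages=messages
    )
    return second.choices[0].message.content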
|
@ -0,0 +1,342 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"tool_choice": "auto",
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"description": "Get the current weather in a given location",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string",
|
||||
"description": "The city and state, e.g. Boston, MA"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"location"
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '543'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9ZqgNAOJAOLYMgdmxouatKXJlk",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456245,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": "call_O8NOz8VlxosSASEsOY7LDUcP",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"arguments": "{\"location\": \"Seattle, WA\"}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "call_3m7cyuckijnpiWr6tq0Tl8Mg",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "get_current_weather",
|
||||
"arguments": "{\"location\": \"San Francisco, CA\"}"
|
||||
}
|
||||
}
|
||||
],
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "tool_calls"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 75,
|
||||
"completion_tokens": 51,
|
||||
"total_tokens": 126,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"system_fingerprint": "fp_0ba0d124f1"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a8098ac5ae167-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:06 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '1308'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '937'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999960'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_3cd7152d2c8c10b4f354b27165f6c2b5
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You're a helpful assistant."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What's the weather in Seattle and San Francisco today?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": "call_O8NOz8VlxosSASEsOY7LDUcP",
|
||||
"function": {
|
||||
"arguments": "{\"location\": \"Seattle, WA\"}",
|
||||
"name": "get_current_weather"
|
||||
},
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"id": "call_3m7cyuckijnpiWr6tq0Tl8Mg",
|
||||
"function": {
|
||||
"arguments": "{\"location\": \"San Francisco, CA\"}",
|
||||
"name": "get_current_weather"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"content": "50 degrees and raining",
|
||||
"tool_call_id": "call_O8NOz8VlxosSASEsOY7LDUcP"
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"content": "70 degrees and sunny",
|
||||
"tool_call_id": "call_3m7cyuckijnpiWr6tq0Tl8Mg"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini"
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '746'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- test_cookie
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9aQnGndy04lqKoPRagym1eEaQK",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456246,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "Today, Seattle is experiencing 50 degrees and raining, while San Francisco has a pleasant 70 degrees and sunny weather.",
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 99,
|
||||
"completion_tokens": 24,
|
||||
"total_tokens": 123,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"system_fingerprint": "fp_f59a81427f"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80a39c71e167-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:04:07 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '871'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '477'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999948'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_193c74758ea30e77e55afe931e89fd6c
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
@ -0,0 +1,132 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: |-
|
||||
{
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Say this is a test"
|
||||
}
|
||||
],
|
||||
"model": "gpt-4o-mini",
|
||||
"stream": false
|
||||
}
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
authorization:
|
||||
- Bearer test_openai_api_key
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '106'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- AsyncOpenAI/Python 1.26.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- async:asyncio
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.26.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.5
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: |-
|
||||
{
|
||||
"id": "chatcmpl-ASv9R2E7Yhb2e7bj4Xl0qm9s3J42Y",
|
||||
"object": "chat.completion",
|
||||
"created": 1731456237,
|
||||
"model": "gpt-4o-mini-2024-07-18",
|
||||
"choices": [
|
||||
{
|
||||
"index": 0,
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": "This is a test. How can I assist you further?",
|
||||
"refusal": null
|
||||
},
|
||||
"logprobs": null,
|
||||
"finish_reason": "stop"
|
||||
}
|
||||
],
|
||||
"usage": {
|
||||
"prompt_tokens": 12,
|
||||
"completion_tokens": 12,
|
||||
"total_tokens": 24,
|
||||
"prompt_tokens_details": {
|
||||
"cached_tokens": 0,
|
||||
"audio_tokens": 0
|
||||
},
|
||||
"completion_tokens_details": {
|
||||
"reasoning_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"accepted_prediction_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
}
|
||||
},
|
||||
"system_fingerprint": "fp_0ba0d124f1"
|
||||
}
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8e1a80679a8311a6-MRS
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Wed, 13 Nov 2024 00:03:58 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie: test_set_cookie
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
access-control-expose-headers:
|
||||
- X-Request-ID
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
content-length:
|
||||
- '796'
|
||||
openai-organization: test_openai_org_id
|
||||
openai-processing-ms:
|
||||
- '359'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
x-ratelimit-limit-requests:
|
||||
- '30000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '150000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '29999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '149999978'
|
||||
x-ratelimit-reset-requests:
|
||||
- 2ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_41ea134c1fc450d4ca4cf8d0c6a7c53a
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
@ -5,7 +5,7 @@ import os

import pytest
import yaml
from openai import OpenAI
from openai import AsyncOpenAI, OpenAI

from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
from opentelemetry.instrumentation.openai_v2.utils import (
@ -63,6 +63,11 @@ def openai_client():
    return OpenAI()


@pytest.fixture
def async_openai_client():
    return AsyncOpenAI()


@pytest.fixture(scope="module")
def vcr_config():
    return {
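
The async_openai_client fixture above pairs with the instrumentation fixtures used by the tests that follow. Outside the test harness, roughly the same wiring looks like the sketch below; the console exporter setup is an assumption for illustration and is not part of the project's conftest (message-content events additionally require an event logger provider, omitted here):

import asyncio

from openai import AsyncOpenAI

from opentelemetry import trace
from opentelemetry.instrumentation.openai_v2 import OpenAIInstrumentor
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor


async def main():
    # Hypothetical standalone setup: spans from Completions/AsyncCompletions
    # create calls are printed to stdout instead of using the test exporters.
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
    trace.set_tracer_provider(provider)
    OpenAIInstrumentor().instrument()

    client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set
    await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "Say this is a test"}],
    )


asyncio.run(main())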
@ -0,0 +1,847 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import pytest
|
||||
from openai import APIConnectionError, AsyncOpenAI, NotFoundError
|
||||
from openai.resources.chat.completions import ChatCompletion
|
||||
|
||||
from opentelemetry.sdk.trace import ReadableSpan
|
||||
from opentelemetry.semconv._incubating.attributes import (
|
||||
error_attributes as ErrorAttributes,
|
||||
)
|
||||
from opentelemetry.semconv._incubating.attributes import (
|
||||
event_attributes as EventAttributes,
|
||||
)
|
||||
from opentelemetry.semconv._incubating.attributes import (
|
||||
gen_ai_attributes as GenAIAttributes,
|
||||
)
|
||||
from opentelemetry.semconv._incubating.attributes import (
|
||||
server_attributes as ServerAttributes,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.vcr()
|
||||
@pytest.mark.asyncio()
|
||||
async def test_async_chat_completion_with_content(
|
||||
span_exporter, log_exporter, async_openai_client, instrument_with_content
|
||||
):
|
||||
llm_model_value = "gpt-4o-mini"
|
||||
messages_value = [{"role": "user", "content": "Say this is a test"}]
|
||||
|
||||
response = await async_openai_client.chat.completions.create(
|
||||
messages=messages_value, model=llm_model_value, stream=False
|
||||
)
|
||||
|
||||
spans = span_exporter.get_finished_spans()
|
||||
assert_completion_attributes(spans[0], llm_model_value, response)
|
||||
|
||||
logs = log_exporter.get_finished_logs()
|
||||
assert len(logs) == 2
|
||||
|
||||
user_message = {"content": messages_value[0]["content"]}
|
||||
assert_message_in_logs(
|
||||
logs[0], "gen_ai.user.message", user_message, spans[0]
|
||||
)
|
||||
|
||||
choice_event = {
|
||||
"index": 0,
|
||||
"finish_reason": "stop",
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": response.choices[0].message.content,
|
||||
},
|
||||
}
|
||||
assert_message_in_logs(logs[1], "gen_ai.choice", choice_event, spans[0])
|
||||
|
||||
|
||||
@pytest.mark.asyncio()
|
||||
async def test_async_chat_completion_bad_endpoint(
|
||||
span_exporter, instrument_no_content
|
||||
):
|
||||
llm_model_value = "gpt-4o-mini"
|
||||
messages_value = [{"role": "user", "content": "Say this is a test"}]
|
||||
|
||||
client = AsyncOpenAI(base_url="http://localhost:4242")
|
||||
|
||||
with pytest.raises(APIConnectionError):
|
||||
await client.chat.completions.create(
|
||||
messages=messages_value,
|
||||
model=llm_model_value,
|
||||
timeout=0.1,
|
||||
)
|
||||
|
||||
spans = span_exporter.get_finished_spans()
|
||||
assert_all_attributes(
|
||||
spans[0], llm_model_value, server_address="localhost"
|
||||
)
|
||||
assert 4242 == spans[0].attributes[ServerAttributes.SERVER_PORT]
|
||||
assert (
|
||||
"APIConnectionError" == spans[0].attributes[ErrorAttributes.ERROR_TYPE]
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.vcr()
|
||||
@pytest.mark.asyncio()
|
||||
async def test_async_chat_completion_404(
|
||||
span_exporter, async_openai_client, instrument_no_content
|
||||
):
|
||||
llm_model_value = "this-model-does-not-exist"
|
||||
messages_value = [{"role": "user", "content": "Say this is a test"}]
|
||||
|
||||
with pytest.raises(NotFoundError):
|
||||
await async_openai_client.chat.completions.create(
|
||||
messages=messages_value,
|
||||
model=llm_model_value,
|
||||
)
|
||||
|
||||
spans = span_exporter.get_finished_spans()
|
||||
|
||||
assert_all_attributes(spans[0], llm_model_value)
|
||||
assert "NotFoundError" == spans[0].attributes[ErrorAttributes.ERROR_TYPE]
|
||||
|
||||
|
||||
@pytest.mark.vcr()
|
||||
@pytest.mark.asyncio()
|
||||
async def test_async_chat_completion_extra_params(
|
||||
span_exporter, async_openai_client, instrument_no_content
|
||||
):
|
||||
llm_model_value = "gpt-4o-mini"
|
||||
messages_value = [{"role": "user", "content": "Say this is a test"}]
|
||||
|
||||
response = await async_openai_client.chat.completions.create(
|
||||
messages=messages_value,
|
||||
model=llm_model_value,
|
||||
seed=42,
|
||||
temperature=0.5,
|
||||
max_tokens=50,
|
||||
stream=False,
|
||||
extra_body={"service_tier": "default"},
|
||||
)
|
||||
|
||||
spans = span_exporter.get_finished_spans()
|
||||
assert_completion_attributes(spans[0], llm_model_value, response)
|
||||
assert (
|
||||
spans[0].attributes[GenAIAttributes.GEN_AI_OPENAI_REQUEST_SEED] == 42
|
||||
)
|
||||
assert (
|
||||
spans[0].attributes[GenAIAttributes.GEN_AI_REQUEST_TEMPERATURE] == 0.5
|
||||
)
|
||||
assert spans[0].attributes[GenAIAttributes.GEN_AI_REQUEST_MAX_TOKENS] == 50
|
||||
assert (
|
||||
spans[0].attributes[GenAIAttributes.GEN_AI_OPENAI_REQUEST_SERVICE_TIER]
|
||||
== "default"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_multiple_choices(
    span_exporter, log_exporter, async_openai_client, instrument_with_content
):
    llm_model_value = "gpt-4o-mini"
    messages_value = [{"role": "user", "content": "Say this is a test"}]

    response = await async_openai_client.chat.completions.create(
        messages=messages_value, model=llm_model_value, n=2, stream=False
    )

    spans = span_exporter.get_finished_spans()
    assert_completion_attributes(spans[0], llm_model_value, response)

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 3  # 1 user message + 2 choice messages

    user_message = {"content": messages_value[0]["content"]}
    assert_message_in_logs(
        logs[0], "gen_ai.user.message", user_message, spans[0]
    )

    choice_event_0 = {
        "index": 0,
        "finish_reason": "stop",
        "message": {
            "role": "assistant",
            "content": response.choices[0].message.content,
        },
    }
    assert_message_in_logs(logs[1], "gen_ai.choice", choice_event_0, spans[0])

    choice_event_1 = {
        "index": 1,
        "finish_reason": "stop",
        "message": {
            "role": "assistant",
            "content": response.choices[1].message.content,
        },
    }
    assert_message_in_logs(logs[2], "gen_ai.choice", choice_event_1, spans[0])


@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_tool_calls_with_content(
    span_exporter, log_exporter, async_openai_client, instrument_with_content
):
    await chat_completion_tool_call(
        span_exporter, log_exporter, async_openai_client, True
    )


@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_tool_calls_no_content(
    span_exporter, log_exporter, async_openai_client, instrument_no_content
):
    await chat_completion_tool_call(
        span_exporter, log_exporter, async_openai_client, False
    )


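# Shared helper: drives a two-step tool-call conversation (tool_calls response,
# then tool results) and validates the spans and log events of both requests.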
async def chat_completion_tool_call(
    span_exporter, log_exporter, async_openai_client, expect_content
):
    llm_model_value = "gpt-4o-mini"
    messages_value = [
        {"role": "system", "content": "You're a helpful assistant."},
        {
            "role": "user",
            "content": "What's the weather in Seattle and San Francisco today?",
        },
    ]

    response_0 = await async_openai_client.chat.completions.create(
        messages=messages_value,
        model=llm_model_value,
        tool_choice="auto",
        tools=[get_current_weather_tool_definition()],
    )

    # sanity check
    assert "tool_calls" in response_0.choices[0].finish_reason

    # final request
    messages_value.append(
        {
            "role": "assistant",
            "tool_calls": response_0.choices[0].message.to_dict()[
                "tool_calls"
            ],
        }
    )

    tool_call_result_0 = {
        "role": "tool",
        "content": "50 degrees and raining",
        "tool_call_id": response_0.choices[0].message.tool_calls[0].id,
    }
    tool_call_result_1 = {
        "role": "tool",
        "content": "70 degrees and sunny",
        "tool_call_id": response_0.choices[0].message.tool_calls[1].id,
    }

    messages_value.append(tool_call_result_0)
    messages_value.append(tool_call_result_1)

    response_1 = await async_openai_client.chat.completions.create(
        messages=messages_value, model=llm_model_value
    )

    # sanity check
    assert "stop" in response_1.choices[0].finish_reason

    # validate both calls
    spans = span_exporter.get_finished_spans()
    assert len(spans) == 2
    assert_completion_attributes(spans[0], llm_model_value, response_0)
    assert_completion_attributes(spans[1], llm_model_value, response_1)

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 9  # 3 logs for first completion, 6 for second

    # call one
    system_message = (
        {"content": messages_value[0]["content"]} if expect_content else None
    )
    assert_message_in_logs(
        logs[0], "gen_ai.system.message", system_message, spans[0]
    )

    user_message = (
        {"content": messages_value[1]["content"]} if expect_content else None
    )
    assert_message_in_logs(
        logs[1], "gen_ai.user.message", user_message, spans[0]
    )

    function_call_0 = {"name": "get_current_weather"}
    function_call_1 = {"name": "get_current_weather"}
    if expect_content:
        function_call_0["arguments"] = (
            response_0.choices[0]
            .message.tool_calls[0]
            .function.arguments.replace("\n", "")
        )
        function_call_1["arguments"] = (
            response_0.choices[0]
            .message.tool_calls[1]
            .function.arguments.replace("\n", "")
        )

    choice_event = {
        "index": 0,
        "finish_reason": "tool_calls",
        "message": {
            "role": "assistant",
            "tool_calls": [
                {
                    "id": response_0.choices[0].message.tool_calls[0].id,
                    "type": "function",
                    "function": function_call_0,
                },
                {
                    "id": response_0.choices[0].message.tool_calls[1].id,
                    "type": "function",
                    "function": function_call_1,
                },
            ],
        },
    }
    assert_message_in_logs(logs[2], "gen_ai.choice", choice_event, spans[0])

    # call two
    system_message = (
        {"content": messages_value[0]["content"]} if expect_content else None
    )
    assert_message_in_logs(
        logs[3], "gen_ai.system.message", system_message, spans[1]
    )

    user_message = (
        {"content": messages_value[1]["content"]} if expect_content else None
    )
    assert_message_in_logs(
        logs[4], "gen_ai.user.message", user_message, spans[1]
    )

    assistant_tool_call = {"tool_calls": messages_value[2]["tool_calls"]}
    if not expect_content:
        assistant_tool_call["tool_calls"][0]["function"]["arguments"] = None
        assistant_tool_call["tool_calls"][1]["function"]["arguments"] = None

    assert_message_in_logs(
        logs[5], "gen_ai.assistant.message", assistant_tool_call, spans[1]
    )

    tool_message_0 = {
        "id": tool_call_result_0["tool_call_id"],
        "content": tool_call_result_0["content"] if expect_content else None,
    }

    assert_message_in_logs(
        logs[6], "gen_ai.tool.message", tool_message_0, spans[1]
    )

    tool_message_1 = {
        "id": tool_call_result_1["tool_call_id"],
        "content": tool_call_result_1["content"] if expect_content else None,
    }

    assert_message_in_logs(
        logs[7], "gen_ai.tool.message", tool_message_1, spans[1]
    )

    message = {
        "role": "assistant",
        "content": response_1.choices[0].message.content
        if expect_content
        else None,
    }
    choice = {
        "index": 0,
        "finish_reason": "stop",
        "message": message,
    }
    assert_message_in_logs(logs[8], "gen_ai.choice", choice, spans[1])


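# Streaming: content is accumulated from the choice deltas, while usage, model,
# and id are read from the final chunk (requires stream_options include_usage).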
@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_streaming(
    span_exporter, log_exporter, async_openai_client, instrument_with_content
):
    llm_model_value = "gpt-4"
    messages_value = [{"role": "user", "content": "Say this is a test"}]

    kwargs = {
        "model": llm_model_value,
        "messages": messages_value,
        "stream": True,
        "stream_options": {"include_usage": True},
    }

    response_stream_usage = None
    response_stream_model = None
    response_stream_id = None
    response_stream_result = ""
    response = await async_openai_client.chat.completions.create(**kwargs)
    async for chunk in response:
        if chunk.choices:
            response_stream_result += chunk.choices[0].delta.content or ""

        # get the last chunk
        if getattr(chunk, "usage", None):
            response_stream_usage = chunk.usage
            response_stream_model = chunk.model
            response_stream_id = chunk.id

    spans = span_exporter.get_finished_spans()
    assert_all_attributes(
        spans[0],
        llm_model_value,
        response_stream_id,
        response_stream_model,
        response_stream_usage.prompt_tokens,
        response_stream_usage.completion_tokens,
    )

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 2

    user_message = {"content": "Say this is a test"}
    assert_message_in_logs(
        logs[0], "gen_ai.user.message", user_message, spans[0]
    )

    choice_event = {
        "index": 0,
        "finish_reason": "stop",
        "message": {"role": "assistant", "content": response_stream_result},
    }
    assert_message_in_logs(logs[1], "gen_ai.choice", choice_event, spans[0])


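# Early-exit streaming: breaking out of the iterator should still end the span,
# and the recorded choice event reports finish_reason "error".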
@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_streaming_not_complete(
    span_exporter, log_exporter, async_openai_client, instrument_with_content
):
    llm_model_value = "gpt-4"
    messages_value = [{"role": "user", "content": "Say this is a test"}]

    kwargs = {
        "model": llm_model_value,
        "messages": messages_value,
        "stream": True,
    }

    response_stream_model = None
    response_stream_id = None
    response_stream_result = ""
    response = await async_openai_client.chat.completions.create(**kwargs)
    idx = 0
    async for chunk in response:
        if chunk.choices:
            response_stream_result += chunk.choices[0].delta.content or ""
            if idx == 1:
                # fake a stop
                break

        if chunk.model:
            response_stream_model = chunk.model
        if chunk.id:
            response_stream_id = chunk.id
        idx += 1

    # AsyncStream.close() is a coroutine, so it has to be awaited to actually
    # release the connection before the finished spans are inspected.
    await response.close()
    spans = span_exporter.get_finished_spans()
    assert_all_attributes(
        spans[0], llm_model_value, response_stream_id, response_stream_model
    )

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 2

    user_message = {"content": "Say this is a test"}
    assert_message_in_logs(
        logs[0], "gen_ai.user.message", user_message, spans[0]
    )

    choice_event = {
        "index": 0,
        "finish_reason": "error",
        "message": {"role": "assistant", "content": response_stream_result},
    }
    assert_message_in_logs(logs[1], "gen_ai.choice", choice_event, spans[0])


@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_multiple_choices_streaming(
    span_exporter, log_exporter, async_openai_client, instrument_with_content
):
    llm_model_value = "gpt-4o-mini"
    messages_value = [
        {"role": "system", "content": "You're a helpful assistant."},
        {
            "role": "user",
            "content": "What's the weather in Seattle and San Francisco today?",
        },
    ]

    response_0 = await async_openai_client.chat.completions.create(
        messages=messages_value,
        model=llm_model_value,
        n=2,
        stream=True,
        stream_options={"include_usage": True},
    )

    # two strings for each choice
    response_stream_result = ["", ""]
    finish_reasons = ["", ""]
    async for chunk in response_0:
        if chunk.choices:
            for choice in chunk.choices:
                response_stream_result[choice.index] += (
                    choice.delta.content or ""
                )
                if choice.finish_reason:
                    finish_reasons[choice.index] = choice.finish_reason

        # get the last chunk
        if getattr(chunk, "usage", None):
            response_stream_usage = chunk.usage
            response_stream_model = chunk.model
            response_stream_id = chunk.id

    # sanity check
    assert "stop" == finish_reasons[0]

    spans = span_exporter.get_finished_spans()
    assert_all_attributes(
        spans[0],
        llm_model_value,
        response_stream_id,
        response_stream_model,
        response_stream_usage.prompt_tokens,
        response_stream_usage.completion_tokens,
    )

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 4

    system_message = {"content": messages_value[0]["content"]}
    assert_message_in_logs(
        logs[0], "gen_ai.system.message", system_message, spans[0]
    )

    user_message = {
        "content": "What's the weather in Seattle and San Francisco today?"
    }
    assert_message_in_logs(
        logs[1], "gen_ai.user.message", user_message, spans[0]
    )

    choice_event_0 = {
        "index": 0,
        "finish_reason": "stop",
        "message": {
            "role": "assistant",
            "content": "".join(response_stream_result[0]),
        },
    }
    assert_message_in_logs(logs[2], "gen_ai.choice", choice_event_0, spans[0])

    choice_event_1 = {
        "index": 1,
        "finish_reason": "stop",
        "message": {
            "role": "assistant",
            "content": "".join(response_stream_result[1]),
        },
    }
    assert_message_in_logs(logs[3], "gen_ai.choice", choice_event_1, spans[0])


@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_multiple_tools_streaming_with_content(
    span_exporter, log_exporter, async_openai_client, instrument_with_content
):
    await async_chat_completion_multiple_tools_streaming(
        span_exporter, log_exporter, async_openai_client, True
    )


@pytest.mark.vcr()
@pytest.mark.asyncio()
async def test_async_chat_completion_multiple_tools_streaming_no_content(
    span_exporter, log_exporter, async_openai_client, instrument_no_content
):
    await async_chat_completion_multiple_tools_streaming(
        span_exporter, log_exporter, async_openai_client, False
    )


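# Shared helper: accumulates streamed tool-call deltas per tool_call.index and
# checks the resulting gen_ai.choice event with and without captured content.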
async def async_chat_completion_multiple_tools_streaming(
    span_exporter, log_exporter, async_openai_client, expect_content
):
    llm_model_value = "gpt-4o-mini"
    messages_value = [
        {"role": "system", "content": "You're a helpful assistant."},
        {
            "role": "user",
            "content": "What's the weather in Seattle and San Francisco today?",
        },
    ]

    response = await async_openai_client.chat.completions.create(
        messages=messages_value,
        model=llm_model_value,
        tool_choice="auto",
        tools=[get_current_weather_tool_definition()],
        stream=True,
        stream_options={"include_usage": True},
    )

    finish_reason = None
    # two tools
    tool_names = ["", ""]
    tool_call_ids = ["", ""]
    tool_args = ["", ""]
    async for chunk in response:
        if chunk.choices:
            if chunk.choices[0].finish_reason:
                finish_reason = chunk.choices[0].finish_reason
            for tool_call in chunk.choices[0].delta.tool_calls or []:
                t_idx = tool_call.index
                if tool_call.id:
                    tool_call_ids[t_idx] = tool_call.id
                if tool_call.function:
                    if tool_call.function.arguments:
                        tool_args[t_idx] += tool_call.function.arguments
                    if tool_call.function.name:
                        tool_names[t_idx] = tool_call.function.name

        # get the last chunk
        if getattr(chunk, "usage", None):
            response_stream_usage = chunk.usage
            response_stream_model = chunk.model
            response_stream_id = chunk.id

    # sanity check
    assert "tool_calls" == finish_reason

    spans = span_exporter.get_finished_spans()
    assert_all_attributes(
        spans[0],
        llm_model_value,
        response_stream_id,
        response_stream_model,
        response_stream_usage.prompt_tokens,
        response_stream_usage.completion_tokens,
    )

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 3

    system_message = (
        {"content": messages_value[0]["content"]} if expect_content else None
    )
    assert_message_in_logs(
        logs[0], "gen_ai.system.message", system_message, spans[0]
    )

    user_message = (
        {"content": "What's the weather in Seattle and San Francisco today?"}
        if expect_content
        else None
    )
    assert_message_in_logs(
        logs[1], "gen_ai.user.message", user_message, spans[0]
    )

    choice_event = {
        "index": 0,
        "finish_reason": "tool_calls",
        "message": {
            "role": "assistant",
            "tool_calls": [
                {
                    "id": tool_call_ids[0],
                    "type": "function",
                    "function": {
                        "name": tool_names[0],
                        "arguments": (
                            tool_args[0].replace("\n", "")
                            if expect_content
                            else None
                        ),
                    },
                },
                {
                    "id": tool_call_ids[1],
                    "type": "function",
                    "function": {
                        "name": tool_names[1],
                        "arguments": (
                            tool_args[1].replace("\n", "")
                            if expect_content
                            else None
                        ),
                    },
                },
            ],
        },
    }
    assert_message_in_logs(logs[2], "gen_ai.choice", choice_event, spans[0])


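# Checks the event name, the gen_ai.system attribute, the (None-stripped) body,
# and that the log record is correlated with the parent span.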
def assert_message_in_logs(log, event_name, expected_content, parent_span):
    assert log.log_record.attributes[EventAttributes.EVENT_NAME] == event_name
    assert (
        log.log_record.attributes[GenAIAttributes.GEN_AI_SYSTEM]
        == GenAIAttributes.GenAiSystemValues.OPENAI.value
    )

    if not expected_content:
        assert not log.log_record.body
    else:
        assert log.log_record.body
        assert dict(log.log_record.body) == remove_none_values(
            expected_content
        )
    assert_log_parent(log, parent_span)


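# Emitted log bodies omit None values, so expected payloads are filtered
# recursively before comparison.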
def remove_none_values(body):
    result = {}
    for key, value in body.items():
        if value is None:
            continue
        if isinstance(value, dict):
            result[key] = remove_none_values(value)
        elif isinstance(value, list):
            result[key] = [remove_none_values(i) for i in value]
        else:
            result[key] = value
    return result


def assert_completion_attributes(
    span: ReadableSpan,
    request_model: str,
    response: ChatCompletion,
    operation_name: str = "chat",
    server_address: str = "api.openai.com",
):
    return assert_all_attributes(
        span,
        request_model,
        response.id,
        response.model,
        response.usage.prompt_tokens,
        response.usage.completion_tokens,
        operation_name,
        server_address,
    )


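# Asserts the span name plus the gen_ai request/response, token-usage, and
# server attributes, treating optional values as "must be absent" when unset.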
def assert_all_attributes(
    span: ReadableSpan,
    request_model: str,
    response_id: str = None,
    response_model: str = None,
    input_tokens: Optional[int] = None,
    output_tokens: Optional[int] = None,
    operation_name: str = "chat",
    server_address: str = "api.openai.com",
):
    assert span.name == f"{operation_name} {request_model}"
    assert (
        operation_name
        == span.attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]
    )
    assert (
        GenAIAttributes.GenAiSystemValues.OPENAI.value
        == span.attributes[GenAIAttributes.GEN_AI_SYSTEM]
    )
    assert (
        request_model == span.attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]
    )
    if response_model:
        assert (
            response_model
            == span.attributes[GenAIAttributes.GEN_AI_RESPONSE_MODEL]
        )
    else:
        assert GenAIAttributes.GEN_AI_RESPONSE_MODEL not in span.attributes

    if response_id:
        assert (
            response_id == span.attributes[GenAIAttributes.GEN_AI_RESPONSE_ID]
        )
    else:
        assert GenAIAttributes.GEN_AI_RESPONSE_ID not in span.attributes

    if input_tokens:
        assert (
            input_tokens
            == span.attributes[GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS]
        )
    else:
        assert GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS not in span.attributes

    if output_tokens:
        assert (
            output_tokens
            == span.attributes[GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS]
        )
    else:
        assert (
            GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS not in span.attributes
        )

    assert server_address == span.attributes[ServerAttributes.SERVER_ADDRESS]


def assert_log_parent(log, span):
    assert log.log_record.trace_id == span.get_span_context().trace_id
    assert log.log_record.span_id == span.get_span_context().span_id
    assert log.log_record.trace_flags == span.get_span_context().trace_flags


def get_current_weather_tool_definition():
    return {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. Boston, MA",
                    },
                },
                "required": ["location"],
                "additionalProperties": False,
            },
        },
    }