Implement uninstrument for opentelemetry-instrumentation-vertexai (#3328)

* Implement uninstrument for `opentelemetry-instrumentation-vertexai`

* Reuse fixture from conftest.py
Aaron Abbott
2025-03-06 10:43:49 -05:00
committed by GitHub
parent 4f9ee01b5f
commit 9811782358
4 changed files with 82 additions and 17 deletions
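
For context (not part of the commit), here is a minimal sketch of the round trip this change enables; it assumes `google-cloud-aiplatform` and `opentelemetry-instrumentation-vertexai` are installed, and it mirrors the new tests added below:

```python
from google.cloud.aiplatform_v1.services.prediction_service import client

from opentelemetry.instrumentation.vertexai import VertexAIInstrumentor

instrumentor = VertexAIInstrumentor()
instrumentor.instrument()  # wraps PredictionServiceClient.generate_content via wrapt
assert hasattr(client.PredictionServiceClient.generate_content, "__wrapped__")

# Before this commit uninstrument() was a TODO no-op; it now restores the
# original generate_content on both the v1 and v1beta1 clients.
instrumentor.uninstrument()
assert not hasattr(client.PredictionServiceClient.generate_content, "__wrapped__")
```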


@@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

+- Implement uninstrument for `opentelemetry-instrumentation-vertexai`
+  ([#3328](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3328))
+
## Version 2.0b0 (2025-02-24)

- Added Vertex AI spans for request parameters


@@ -47,6 +47,7 @@ from wrapt import (

from opentelemetry._events import get_event_logger
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.instrumentation.vertexai.package import _instruments
from opentelemetry.instrumentation.vertexai.patch import (
    generate_content_create,
@@ -56,6 +57,23 @@ from opentelemetry.semconv.schemas import Schemas
from opentelemetry.trace import get_tracer


+def _client_classes():
+    # This import is very slow, do it lazily in case instrument() is not called
+    # pylint: disable=import-outside-toplevel
+    from google.cloud.aiplatform_v1.services.prediction_service import (
+        client,
+    )
+    from google.cloud.aiplatform_v1beta1.services.prediction_service import (
+        client as client_v1beta1,
+    )
+
+    return (
+        client.PredictionServiceClient,
+        client_v1beta1.PredictionServiceClient,
+    )
+
+
class VertexAIInstrumentor(BaseInstrumentor):
    def instrumentation_dependencies(self) -> Collection[str]:
        return _instruments
@@ -77,20 +95,15 @@ class VertexAIInstrumentor(BaseInstrumentor):
            event_logger_provider=event_logger_provider,
        )

-        wrap_function_wrapper(
-            module="google.cloud.aiplatform_v1beta1.services.prediction_service.client",
-            name="PredictionServiceClient.generate_content",
-            wrapper=generate_content_create(
-                tracer, event_logger, is_content_enabled()
-            ),
-        )
-        wrap_function_wrapper(
-            module="google.cloud.aiplatform_v1.services.prediction_service.client",
-            name="PredictionServiceClient.generate_content",
-            wrapper=generate_content_create(
-                tracer, event_logger, is_content_enabled()
-            ),
-        )
+        for client_class in _client_classes():
+            wrap_function_wrapper(
+                client_class,
+                name="generate_content",
+                wrapper=generate_content_create(
+                    tracer, event_logger, is_content_enabled()
+                ),
+            )

    def _uninstrument(self, **kwargs: Any) -> None:
-        """TODO: implemented in later PR"""
+        for client_class in _client_classes():
+            unwrap(client_class, "generate_content")
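
A note on the mechanism (not part of the diff): `wrap_function_wrapper` keeps the original callable on the wrapper's `__wrapped__` attribute, and `opentelemetry.instrumentation.utils.unwrap(obj, attr)` restores it. A toy sketch with a hypothetical `FakeClient` class:

```python
from wrapt import wrap_function_wrapper

from opentelemetry.instrumentation.utils import unwrap


class FakeClient:  # hypothetical stand-in for PredictionServiceClient
    def generate_content(self, request):
        return "real response"


def tracing_wrapper(wrapped, instance, args, kwargs):
    # Stand-in for generate_content_create(): just call through to the original.
    return wrapped(*args, **kwargs)


wrap_function_wrapper(FakeClient, "generate_content", tracing_wrapper)
assert hasattr(FakeClient.generate_content, "__wrapped__")

unwrap(FakeClient, "generate_content")  # put the original, unwrapped method back
assert not hasattr(FakeClient.generate_content, "__wrapped__")
```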


@@ -111,7 +111,8 @@ def instrument_no_content(

    yield instrumentor
    os.environ.pop(OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT, None)
-    instrumentor.uninstrument()
+    if instrumentor.is_instrumented_by_opentelemetry:
+        instrumentor.uninstrument()


@pytest.fixture
@@ -130,7 +131,8 @@ def instrument_with_content(

    yield instrumentor
    os.environ.pop(OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT, None)
-    instrumentor.uninstrument()
+    if instrumentor.is_instrumented_by_opentelemetry:
+        instrumentor.uninstrument()


@pytest.fixture(scope="module")
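
The new guard matters because `test_uninstruments` (added below) calls `uninstrument()` inside the test body, so the fixture teardown would otherwise call it a second time on an already-uninstrumented instrumentor. A hedged sketch of the pattern on its own (hypothetical fixture name, not the conftest code):

```python
import pytest

from opentelemetry.instrumentation.vertexai import VertexAIInstrumentor


@pytest.fixture
def vertexai_instrumentor():  # hypothetical name
    instrumentor = VertexAIInstrumentor()
    instrumentor.instrument()
    yield instrumentor
    # Tear down only if the test did not already uninstrument.
    if instrumentor.is_instrumented_by_opentelemetry:
        instrumentor.uninstrument()
```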


@@ -0,0 +1,47 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import pytest
+from google.cloud.aiplatform_v1.services.prediction_service import client
+from google.cloud.aiplatform_v1beta1.services.prediction_service import (
+    client as client_v1beta1,
+)
+
+from opentelemetry.instrumentation.vertexai import VertexAIInstrumentor
+
+
+@pytest.fixture(
+    name="client_class",
+    params=[
+        pytest.param(client.PredictionServiceClient, id="v1"),
+        pytest.param(client_v1beta1.PredictionServiceClient, id="v1beta1"),
+    ],
+)
+def fixture_client_class(request: pytest.FixtureRequest):
+    return request.param
+
+
+def test_instruments(
+    instrument_with_content: VertexAIInstrumentor, client_class
+):
+    assert hasattr(client_class.generate_content, "__wrapped__")
+
+
+def test_uninstruments(
+    instrument_with_content: VertexAIInstrumentor, client_class
+):
+    instrument_with_content.uninstrument()
+    assert not hasattr(client_class.generate_content, "__wrapped__")