google_genai_tracer

GenerateContentResponse

Bases: GenerateContentResponse

Wrapper around google.genai.types.GenerateContentResponse that adds an ImpactsOutput field (impacts)
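
Example (illustrative): the wrapped response behaves like the original google.genai.types.GenerateContentResponse and exposes one extra attribute, impacts. A minimal sketch, assuming EcoLogits has been initialized and response comes from an instrumented generate_content call:

print(response.text)              # original google-genai field, unchanged
if response.impacts is not None:  # ImpactsOutput attached by EcoLogits
    print(response.impacts)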

GoogleGenaiInstrumentor()

Instrumentor initialized by EcoLogits to automatically wrap all Google GenAI content generation calls (sync, async, and streaming)

Source code in ecologits/tracers/google_genai_tracer.py
def __init__(self) -> None:
    self.wrapped_methods = [
        {
            "module": "google.genai.models",
            "name": "Models.generate_content",
            "wrapper": google_genai_content_wrapper,
        },
        {
            "module": "google.genai.models",
            "name": "Models.generate_content_stream",
            "wrapper": google_genai_content_stream_wrapper,
        },
        {
            "module": "google.genai.models",
            "name": "AsyncModels.generate_content",
            "wrapper": google_genai_async_content_wrapper
        },
        {
            "module": "google.genai.models",
            "name": "AsyncModels.generate_content_stream",
            "wrapper": google_genai_async_content_stream_wrapper
        }
    ]
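
Example (illustrative): the instrumentor is activated through the standard EcoLogits entry point, after which regular google-genai calls are wrapped transparently. A minimal sketch, assuming EcoLogits.init() and an API key available in the environment; the provider identifier "google_genai" passed to init is an assumption:

from ecologits import EcoLogits
from google import genai

EcoLogits.init(providers=["google_genai"])  # assumed provider identifier

client = genai.Client()  # picks up the API key from the environment
response = client.models.generate_content(
    model="gemini-2.0-flash",
    contents="Hello!",
)
# Models.generate_content is now wrapped, so the response carries impacts.
print(response.impacts)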

google_genai_content_wrapper(wrapped, instance, args, kwargs)

Function that wraps Google GenAI answer with computed impacts

Parameters:

    wrapped (Callable): Callable that returns the LLM response. Required.
    instance (Models): Never used - for compatibility with wrapt. Required.
    args (Any): Arguments of the callable. Required.
    kwargs (Any): Keyword arguments of the callable. Required.

Returns:

    GenerateContentResponse: A wrapped GenerateContentResponse with impacts.

Source code in ecologits/tracers/google_genai_tracer.py
def google_genai_content_wrapper(
    wrapped: Callable,
    instance: Models,   # noqa: ARG001
    args: Any,
    kwargs: Any
) -> GenerateContentResponse:
    """
    Function that wraps Google GenAI answer with computed impacts

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Returns:
        A wrapped `GenerateContentResponse` with impacts
    """
    timer_start = time.perf_counter()
    response = wrapped(*args, **kwargs)
    request_latency = time.perf_counter() - timer_start
    model_name = kwargs["model"]
    input_tokens = response.usage_metadata.candidates_token_count
    output_tokens = response.usage_metadata.total_token_count - input_tokens
    impacts = llm_impacts(
        provider=PROVIDER,
        model_name=model_name,
        output_token_count=output_tokens,
        request_latency=request_latency,
        electricity_mix_zone=EcoLogits.config.electricity_mix_zone,
    )
    if impacts is not None:
        if EcoLogits.config.opentelemetry:
            EcoLogits.config.opentelemetry.record_request(
                input_tokens=input_tokens,
                output_tokens=output_tokens,
                request_latency=request_latency,
                impacts=impacts,
                provider=PROVIDER,
                model=model_name,
                endpoint=f"/v1beta/models/{model_name}:generateContent"
            )

        return GenerateContentResponse(**response.model_dump(), impacts=impacts)
    else:
        return response
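
Usage sketch for the wrapped sync endpoint. Note that the wrapper reads the model name from kwargs["model"], so model must be passed as a keyword argument, and that when impacts cannot be computed (for example, an unknown model) the unwrapped response is returned, so the impacts attribute is best accessed defensively. Assumes EcoLogits.init() has already been called:

from google import genai

client = genai.Client()  # assumes an API key is available in the environment
response = client.models.generate_content(
    model="gemini-2.0-flash",  # read by the wrapper via kwargs["model"]
    contents="Explain the greenhouse effect in one sentence.",
)
impacts = getattr(response, "impacts", None)  # absent if impacts could not be computed
if impacts is not None:
    print(impacts)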

google_genai_content_stream_wrapper(wrapped, instance, args, kwargs)

Function that wraps Google GenAI answer with computed impacts in streaming mode.

Parameters:

    wrapped (Callable): Callable that returns the LLM response. Required.
    instance (Models): Never used - for compatibility with wrapt. Required.
    args (Any): Arguments of the callable. Required.
    kwargs (Any): Keyword arguments of the callable. Required.

Yields:

    GenerateContentResponse: A wrapped GenerateContentResponse with impacts.

Source code in ecologits/tracers/google_genai_tracer.py
def google_genai_content_stream_wrapper(
    wrapped: Callable,
    instance: Models,   # noqa: ARG001
    args: Any,
    kwargs: Any
) -> Iterator[GenerateContentResponse]:
    """
    Function that wraps Google GenAI answer with computed impacts in streaming mode.

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Yields:
        A wrapped `GenerateContentResponse` with impacts
    """
    timer_start = time.perf_counter()
    stream = wrapped(*args, **kwargs)
    for chunk in stream:
        if chunk.candidates[0].finish_reason is None:
            yield GenerateContentResponse(**chunk.model_dump(), impacts=None)

        else:
            request_latency = time.perf_counter() - timer_start
            model_name = kwargs["model"]
            input_tokens = chunk.usage_metadata.candidates_token_count
            output_tokens = chunk.usage_metadata.total_token_count - input_tokens
            impacts = llm_impacts(
                provider=PROVIDER,
                model_name=model_name,
                output_token_count=output_tokens,
                request_latency=request_latency,
                electricity_mix_zone=EcoLogits.config.electricity_mix_zone,
            )
            if impacts is not None:
                if EcoLogits.config.opentelemetry:
                    EcoLogits.config.opentelemetry.record_request(
                        input_tokens=input_tokens,
                        output_tokens=output_tokens,
                        request_latency=request_latency,
                        impacts=impacts,
                        provider=PROVIDER,
                        model=model_name,
                        endpoint=f"/v1beta/models/{model_name}:generateContent"
                    )

                yield GenerateContentResponse(**chunk.model_dump(), impacts=impacts)
            else:
                yield GenerateContentResponse(**chunk.model_dump(), impacts=None)
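
Usage sketch for the wrapped streaming endpoint. As the wrapper above shows, intermediate chunks are yielded with impacts=None and the final chunk (the one with a finish_reason) carries the computed impacts. Assumes EcoLogits.init() has already been called:

from google import genai

client = genai.Client()
final_impacts = None
for chunk in client.models.generate_content_stream(
    model="gemini-2.0-flash",
    contents="Write a haiku about rivers.",
):
    print(chunk.text or "", end="")
    if chunk.impacts is not None:  # only set on the last chunk
        final_impacts = chunk.impacts
print()
print(final_impacts)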

google_genai_async_content_wrapper(wrapped, instance, args, kwargs) async

Function that wraps Google GenAI answer with computed impacts in async mode.

Parameters:

    wrapped (Callable): Callable that returns the LLM response. Required.
    instance (Models): Never used - for compatibility with wrapt. Required.
    args (Any): Arguments of the callable. Required.
    kwargs (Any): Keyword arguments of the callable. Required.

Returns:

    GenerateContentResponse: A wrapped GenerateContentResponse with impacts.

Source code in ecologits/tracers/google_genai_tracer.py
async def google_genai_async_content_wrapper(
    wrapped: Callable,
    instance: Models,   # noqa: ARG001
    args: Any,
    kwargs: Any
) -> GenerateContentResponse:
    """
    Function that wraps Google GenAI answer with computed impacts in async mode.

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Returns:
        A wrapped `GenerateContentResponse` with impacts
    """
    timer_start = time.perf_counter()
    response = await wrapped(*args, **kwargs)
    request_latency = time.perf_counter() - timer_start
    model_name = kwargs["model"]
    input_tokens = response.usage_metadata.candidates_token_count
    output_tokens = response.usage_metadata.total_token_count - input_tokens
    impacts = llm_impacts(
        provider=PROVIDER,
        model_name=model_name,
        output_token_count=output_tokens,
        request_latency=request_latency,
        electricity_mix_zone=EcoLogits.config.electricity_mix_zone,
    )
    if impacts is not None:
        if EcoLogits.config.opentelemetry:
            EcoLogits.config.opentelemetry.record_request(
                input_tokens=input_tokens,
                output_tokens=output_tokens,
                request_latency=request_latency,
                impacts=impacts,
                provider=PROVIDER,
                model=model_name,
                endpoint=f"/v1beta/models/{model_name}:generateContent"
            )

        return GenerateContentResponse(**response.model_dump(), impacts=impacts)
    else:
        return response
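
Usage sketch for the async variant, which instruments AsyncModels.generate_content. Assuming this corresponds to the client's aio interface in the google-genai SDK:

import asyncio
from google import genai

async def main() -> None:
    client = genai.Client()
    response = await client.aio.models.generate_content(
        model="gemini-2.0-flash",
        contents="Hello!",
    )
    print(getattr(response, "impacts", None))

asyncio.run(main())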

google_genai_async_content_stream_wrapper(wrapped, instance, args, kwargs) async

Function that wraps Google GenAI answer with computed impacts in async and streaming mode.

Parameters:

    wrapped (Callable): Callable that returns the LLM response. Required.
    instance (Models): Never used - for compatibility with wrapt. Required.
    args (Any): Arguments of the callable. Required.
    kwargs (Any): Keyword arguments of the callable. Required.

Yields:

    AsyncIterator[GenerateContentResponse]: A wrapped GenerateContentResponse with impacts.

Source code in ecologits/tracers/google_genai_tracer.py
async def google_genai_async_content_stream_wrapper(
    wrapped: Callable,
    instance: Models,   # noqa: ARG001
    args: Any,
    kwargs: Any
) -> AsyncIterator[GenerateContentResponse]:
    """
    Function that wraps Google GenAI answer with computed impacts in async and streaming mode.

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Yields:
        A wrapped `GenerateContentResponse` with impacts
    """
    timer_start = time.perf_counter()
    stream = await wrapped(*args, **kwargs)
    return _generator(stream, timer_start=timer_start, model_name=kwargs["model"])
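
Usage sketch for the async streaming variant. Since the wrapper awaits the underlying call and then returns an async generator, the stream is awaited first and iterated with async for; each yielded chunk is assumed to mirror the sync streaming wrapper above, with impacts set on the final chunk:

import asyncio
from google import genai

async def main() -> None:
    client = genai.Client()
    stream = await client.aio.models.generate_content_stream(
        model="gemini-2.0-flash",
        contents="Count to three.",
    )
    async for chunk in stream:
        print(chunk.text or "", end="")
        if chunk.impacts is not None:  # computed on the final chunk
            print()
            print(chunk.impacts)

asyncio.run(main())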