anthropic_tracer

Message

Bases: Message

Wrapper of anthropic.types.Message with ImpactsOutput
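
Example usage (illustrative sketch, not part of the library source; assumes EcoLogits has been initialized and that ImpactsOutput exposes energy and gwp fields with a value attribute):

from anthropic import Anthropic
from ecologits import EcoLogits

EcoLogits.init()  # patch the Anthropic client so responses carry impacts
client = Anthropic()

response = client.messages.create(
    model="claude-3-5-sonnet-20240620",
    max_tokens=100,
    messages=[{"role": "user", "content": "Hello!"}],
)

# The wrapped Message behaves like anthropic.types.Message, plus an impacts field.
print(response.impacts.energy.value)  # estimated energy use (assumed field layout)
print(response.impacts.gwp.value)     # estimated greenhouse gas emissions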

MessageStream

Bases: MessageStream

Wrapper of anthropic.lib.streaming.MessageStream with ImpactsOutput

AsyncMessageStream

Bases: AsyncMessageStream

Wrapper of anthropic.lib.streaming.AsyncMessageStream with ImpactsOutput

MessageStreamManager(api_request)

Bases: Generic[MessageStreamT]

Re-implementation of Anthropic's MessageStreamManager that yields the wrapped MessageStream

Source code in ecologits/tracers/anthropic_tracer.py
def __init__(self, api_request: Callable[[], MessageStream]) -> None:
    self.__api_request = api_request

AsyncMessageStreamManager(api_request)

Bases: Generic[AsyncMessageStreamT]

Re-implementation of Anthropic's AsyncMessageStreamManager that yields the wrapped AsyncMessageStream

Source code in ecologits/tracers/anthropic_tracer.py
def __init__(self, api_request: Awaitable[AsyncMessageStream]) -> None:
    self.__api_request = api_request
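
To make the role of these two managers concrete, here is a generic sketch of the pattern they implement (illustrative only; the actual __enter__/__aenter__ bodies are not reproduced on this page): the API request is deferred, and on entering the context the manager yields the EcoLogits stream wrapper instead of the vanilla Anthropic one.

from contextlib import AbstractContextManager

class SketchStreamManager(AbstractContextManager):
    """Illustrative pattern only: defer the API call, yield a wrapped stream on entry."""

    def __init__(self, api_request):
        self._api_request = api_request      # deferred callable that performs the HTTP request

    def __enter__(self):
        self._stream = self._api_request()   # the request happens only when the context is entered
        # The real managers return the wrapped MessageStream / AsyncMessageStream here,
        # so impacts can be computed while the stream is consumed.
        return self._stream

    def __exit__(self, exc_type, exc, tb):
        close = getattr(self._stream, "close", None)
        if close is not None:
            close()                          # release the underlying stream
        return None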

AnthropicInstrumentor()

Instrumentor initialized by EcoLogits to automatically wrap the supported Anthropic client calls (Messages.create and Messages.stream, in both sync and async form)

Source code in ecologits/tracers/anthropic_tracer.py
def __init__(self) -> None:
    self.wrapped_methods = [
        {
            "module": "anthropic.resources",
            "name": "Messages.create",
            "wrapper": anthropic_chat_wrapper,
        },
        {
            "module": "anthropic.resources",
            "name": "AsyncMessages.create",
            "wrapper": anthropic_async_chat_wrapper,
        },
        {
            "module": "anthropic.resources",
            "name": "Messages.stream",
            "wrapper": anthropic_stream_chat_wrapper,
        },
        {
            "module": "anthropic.resources",
            "name": "AsyncMessages.stream",
            "wrapper": anthropic_async_stream_chat_wrapper,
        },
    ]
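
Each entry in wrapped_methods pairs a dotted attribute path with one of the wrapper functions below. The (wrapped, instance, args, kwargs) convention matches wrapt, so an entry can be applied roughly as in the following sketch (illustrative; the actual instrument method is not shown on this page):

import wrapt

def instrument_sketch(wrapped_methods):
    # Patch each target method in place so every call goes through its wrapper first.
    for method in wrapped_methods:
        wrapt.wrap_function_wrapper(
            method["module"],   # e.g. "anthropic.resources"
            method["name"],     # e.g. "Messages.create"
            method["wrapper"],  # e.g. anthropic_chat_wrapper
        )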

anthropic_chat_wrapper(wrapped, instance, args, kwargs)

Function that wraps an Anthropic answer with computed impacts

Parameters:

Name      Type       Description                                  Default
wrapped   Callable   Callable that returns the LLM response       required
instance  Anthropic  Never used - for compatibility with wrapt    required
args      Any        Arguments of the callable                    required
kwargs    Any        Keyword arguments of the callable            required

Returns:

Type     Description
Message  A wrapped Message with impacts

Source code in ecologits/tracers/anthropic_tracer.py
def anthropic_chat_wrapper(
    wrapped: Callable, instance: Anthropic, args: Any, kwargs: Any  # noqa: ARG001
) -> Message:
    """
    Function that wraps an Anthropic answer with computed impacts

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Returns:
        A wrapped `Message` with impacts
    """

    timer_start = time.perf_counter()
    response = wrapped(*args, **kwargs)
    request_latency = time.perf_counter() - timer_start
    model_name = response.model
    impacts = llm_impacts(
        provider=PROVIDER,
        model_name=model_name,
        output_token_count=response.usage.output_tokens,
        request_latency=request_latency,
        electricity_mix_zone=EcoLogits.config.electricity_mix_zone
    )
    if impacts is not None:
        return Message(**response.model_dump(), impacts=impacts)
    else:
        return response
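
Note that when llm_impacts returns None (for example, for a model EcoLogits cannot estimate), the wrapper returns the original anthropic.types.Message without an impacts attribute. A defensive access pattern such as the following can therefore be useful (illustrative only):

def read_impacts(response):
    # The wrapper falls back to the raw anthropic Message when impacts cannot be
    # computed, so the attribute may be missing.
    return getattr(response, "impacts", None)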

anthropic_async_chat_wrapper(wrapped, instance, args, kwargs) async

Function that wraps an Anthropic answer with computed impacts in async mode

Parameters:

Name      Type            Description                                     Default
wrapped   Callable        Async callable that returns the LLM response    required
instance  AsyncAnthropic  Never used - for compatibility with wrapt       required
args      Any             Arguments of the callable                       required
kwargs    Any             Keyword arguments of the callable               required

Returns:

Type     Description
Message  A wrapped Message with impacts

Source code in ecologits/tracers/anthropic_tracer.py
async def anthropic_async_chat_wrapper(
    wrapped: Callable, instance: AsyncAnthropic, args: Any, kwargs: Any  # noqa: ARG001
) -> Message:
    """
    Function that wraps an Anthropic answer with computed impacts in async mode

    Args:
        wrapped: Async callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Returns:
        A wrapped `Message` with impacts
    """

    timer_start = time.perf_counter()
    response = await wrapped(*args, **kwargs)
    request_latency = time.perf_counter() - timer_start
    model_name = response.model
    impacts = llm_impacts(
        provider=PROVIDER,
        model_name=model_name,
        output_token_count=response.usage.output_tokens,
        request_latency=request_latency,
        electricity_mix_zone=EcoLogits.config.electricity_mix_zone
    )
    if impacts is not None:
        return Message(**response.model_dump(), impacts=impacts)
    else:
        return response
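
For reference, a minimal async usage sketch (illustrative, not taken from the library's documentation; assumes EcoLogits.init() has instrumented the client and that the wrapped Message exposes impacts as above):

import asyncio
from anthropic import AsyncAnthropic
from ecologits import EcoLogits

async def main():
    EcoLogits.init()
    client = AsyncAnthropic()
    response = await client.messages.create(
        model="claude-3-5-sonnet-20240620",
        max_tokens=100,
        messages=[{"role": "user", "content": "Hello!"}],
    )
    # Same wrapped Message as in the synchronous case.
    print(response.impacts.gwp.value)  # assumed ImpactsOutput field layout

asyncio.run(main())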

anthropic_stream_chat_wrapper(wrapped, instance, args, kwargs)

Function that wraps an Anthropic answer with computed impacts in streaming mode

Parameters:

Name      Type       Description                                  Default
wrapped   Callable   Callable that returns the LLM response       required
instance  Anthropic  Never used - for compatibility with wrapt    required
args      Any        Arguments of the callable                    required
kwargs    Any        Keyword arguments of the callable            required

Returns:

Type                  Description
MessageStreamManager  A wrapped MessageStreamManager with impacts

Source code in ecologits/tracers/anthropic_tracer.py
def anthropic_stream_chat_wrapper(
    wrapped: Callable, instance: Anthropic, args: Any, kwargs: Any  # noqa: ARG001
) -> MessageStreamManager:
    """
    Function that wraps an Anthropic answer with computed impacts in streaming mode

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Returns:
        A wrapped `MessageStreamManager` with impacts
    """
    response = wrapped(*args, **kwargs)
    return MessageStreamManager(response._MessageStreamManager__api_request)    # noqa: SLF001
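
A streaming usage sketch (illustrative; exactly where the wrapped MessageStream exposes its ImpactsOutput is an assumption here, shown as stream.impacts):

from anthropic import Anthropic
from ecologits import EcoLogits

EcoLogits.init()
client = Anthropic()

with client.messages.stream(
    model="claude-3-5-sonnet-20240620",
    max_tokens=100,
    messages=[{"role": "user", "content": "Tell me a joke."}],
) as stream:
    for text in stream.text_stream:
        print(text, end="", flush=True)
    # Impacts are computed from the streamed output tokens; attribute placement assumed.
    print(stream.impacts.energy.value)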

anthropic_async_stream_chat_wrapper(wrapped, instance, args, kwargs)

Function that wraps an Anthropic answer with computed impacts in streaming and async mode

Parameters:

Name      Type            Description                                  Default
wrapped   Callable        Callable that returns the LLM response       required
instance  AsyncAnthropic  Never used - for compatibility with wrapt    required
args      Any             Arguments of the callable                    required
kwargs    Any             Keyword arguments of the callable            required

Returns:

Type                       Description
AsyncMessageStreamManager  A wrapped AsyncMessageStreamManager with impacts

Source code in ecologits/tracers/anthropic_tracer.py
def anthropic_async_stream_chat_wrapper(
    wrapped: Callable, instance: AsyncAnthropic, args: Any, kwargs: Any  # noqa: ARG001
) -> AsyncMessageStreamManager:
    """
    Function that wraps an Anthropic answer with computed impacts in streaming and async mode

    Args:
        wrapped: Callable that returns the LLM response
        instance: Never used - for compatibility with `wrapt`
        args: Arguments of the callable
        kwargs: Keyword arguments of the callable

    Returns:
        A wrapped `AsyncMessageStreamManager` with impacts
    """
    response = wrapped(*args, **kwargs)
    return AsyncMessageStreamManager(response._AsyncMessageStreamManager__api_request)  # noqa: SLF001
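
The async streaming variant follows the same shape (illustrative sketch, with the same caveat about the assumed impacts attribute):

import asyncio
from anthropic import AsyncAnthropic
from ecologits import EcoLogits

async def main():
    EcoLogits.init()
    client = AsyncAnthropic()
    async with client.messages.stream(
        model="claude-3-5-sonnet-20240620",
        max_tokens=100,
        messages=[{"role": "user", "content": "Tell me a joke."}],
    ) as stream:
        async for text in stream.text_stream:
            print(text, end="", flush=True)
        print(stream.impacts.energy.value)  # assumed attribute placement

asyncio.run(main())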