# Source code for agent_inspect.models.llm_response

from typing import Optional
from dataclasses import dataclass

@dataclass
class LLMResponse:
    """Response returned by an LLM call.

    Attributes:
        status: HTTP status code returned by the LLM service.
        completion: Text generated by the LLM for the input prompt;
            ``None`` when an error prevented any output.
        error_message: Error details reported by the LLM for the input
            prompt; ``None`` when no error occurred.
    """

    # HTTP status code from the LLM.
    status: int
    # Generated text; None if the LLM produced no output due to an error.
    completion: Optional[str] = None
    # Error information; None if no error occurred.
    error_message: Optional[str] = None