EchoResponseParser#

class council.llm.llm_response_parser.EchoResponseParser[source]#

Bases: object

static from_response(response: LLMResponse) → LLMResponse[source]#

LLMFunction ResponseParser returning LLMResponse

StringResponseParser#

class council.llm.llm_response_parser.StringResponseParser[source]#

Bases: object

static from_response(response: LLMResponse) → str[source]#

LLMFunction ResponseParser for plain text responses

BaseModelResponseParser#

class council.llm.llm_response_parser.BaseModelResponseParser[source]#

Bases: BaseModel, ABC

Base class for parsing LLM responses into structured data models

abstract classmethod from_response(response: LLMResponse) → T[source]#

Parse an LLM response into a structured data model. Must be implemented by subclasses to define specific parsing logic.

validator() → None[source]#

Implement custom validation logic for the parsed data. Can be overridden by subclasses to add specific validation rules. Raise LLMParsingException to trigger local correction. Alternatively, use pydantic validation.

CodeBlocksResponseParser#

class council.llm.llm_response_parser.CodeBlocksResponseParser[source]#

Bases: BaseModelResponseParser

classmethod from_response(response: LLMResponse) → T[source]#

LLMFunction ResponseParser for response containing multiple named code blocks

Code Example#

Here’s how you can simplify the council.llm.LLMFunction example for a sample SQL generation task.

import os

# !pip install council-ai==0.0.24

from council import OpenAILLM
from council.llm import LLMParsingException
from council.llm.llm_function import LLMFunction
from council.llm.llm_response_parser import CodeBlocksResponseParser

SYSTEM_PROMPT = "same system prompt as in LLMFunction example"


# CodeBlocksResponseParser supplies from_response() automatically, so the
# model only needs its fields plus any optional validation.
class SQLResultFromCodeBlocks(CodeBlocksResponseParser):
    solved: bool
    sql: str

    def validator(self) -> None:
        # Raising LLMParsingException triggers local correction: the
        # LLMFunction retries, feeding the message back to the model.
        query = self.sql.lower()
        if "limit" not in query:
            raise LLMParsingException("Generated SQL query should contain a LIMIT clause")


os.environ.update(
    {
        "OPENAI_API_KEY": "sk-YOUR-KEY-HERE",
        "OPENAI_LLM_MODEL": "gpt-4o-mini-2024-07-18",
    }
)
llm = OpenAILLM.from_env()

# Everything from here on is identical to the plain LLMFunction example.
llm_function: LLMFunction[SQLResultFromCodeBlocks] = LLMFunction(
    llm, SQLResultFromCodeBlocks.from_response, SYSTEM_PROMPT
)

response = llm_function.execute(user_message="Show me first 5 rows of the dataset ordered by price")
print(type(response))
print(response.sql)

YAMLBlockResponseParser#

class council.llm.llm_response_parser.YAMLBlockResponseParser[source]#

Bases: YAMLResponseParserBase

classmethod from_response(response: LLMResponse) → T[source]#

LLMFunction ResponseParser for response containing a single YAML code block

classmethod to_response_template(include_hints: bool = True) → str[source]#

Generate YAML block response template based on the model’s fields and their descriptions.

Parameters:

include_hints – If True, returned template will include universal YAML block formatting hints.

Code Example#

import os
from typing import Literal

# !pip install council-ai==0.0.27

from council import OpenAILLM
from council.llm.llm_function import LLMFunction
from council.llm.llm_response_parser import YAMLBlockResponseParser
from pydantic import Field

SYSTEM_PROMPT = """
Generate RPG character:

{response_template}
"""


# Field descriptions below feed to_response_template(), which turns them
# into the YAML skeleton the model is asked to fill in.
class RPGCharacterFromYAMLBlock(YAMLBlockResponseParser):
    character_class: Literal["Warrior", "Mage", "Rogue", "Bard", "Tech Support"] = Field(
        ..., description="Character's class (Warrior, Mage, Rogue, Bard or Tech Support)"
    )
    name: str = Field(..., min_length=3, description="Character's name")
    description: str = Field(..., min_length=50, description="Character's tragic backstory, 50 chars minimum")
    health: int = Field(..., ge=1, le=100, description="Character's health, integer, from 1 to 100 points")


os.environ.update(
    {
        "OPENAI_API_KEY": "sk-YOUR-KEY-HERE",
        "OPENAI_LLM_MODEL": "gpt-4o-mini-2024-07-18",
    }
)
llm = OpenAILLM.from_env()

# Bake the generated response template into the system prompt up front.
response_template = RPGCharacterFromYAMLBlock.to_response_template()
llm_function: LLMFunction[RPGCharacterFromYAMLBlock] = LLMFunction(
    llm,
    RPGCharacterFromYAMLBlock.from_response,
    SYSTEM_PROMPT.format(response_template=response_template),
)

character = llm_function.execute(user_message="Create some wise mage")
print(type(character))
print(f"{character.name}, {character.character_class} ({character.health}/100 hp)")
print(character.description)

YAMLResponseParser#

class council.llm.llm_response_parser.YAMLResponseParser[source]#

Bases: YAMLResponseParserBase

classmethod from_response(response: LLMResponse) → T[source]#

LLMFunction ResponseParser for response containing raw YAML content

classmethod to_response_template(include_hints: bool = True) → str[source]#

Generate YAML response template based on the model’s fields and their descriptions.

Parameters:

include_hints – If True, returned template will include universal YAML formatting hints.

JSONBlockResponseParser#

class council.llm.llm_response_parser.JSONBlockResponseParser[source]#

Bases: JSONResponseParserBase

classmethod from_response(response: LLMResponse) → T[source]#

LLMFunction ResponseParser for response containing a single JSON code block

classmethod to_response_template(include_hints: bool = True) → str[source]#

Generate JSON block response template based on the model’s fields and their descriptions.

Parameters:

include_hints – If True, returned template will include universal JSON block formatting hints.

JSONResponseParser#

class council.llm.llm_response_parser.JSONResponseParser[source]#

Bases: JSONResponseParserBase

classmethod from_response(response: LLMResponse) → T[source]#

LLMFunction ResponseParser for response containing raw JSON content

classmethod to_response_template(include_hints: bool = True) → str[source]#

Generate JSON response template based on the model’s fields and their descriptions.

Parameters:

include_hints – If True, returned template will include universal JSON formatting hints.

Code Example#

Usage example with OpenAI json mode:

import os
from typing import Literal

# !pip install council-ai==0.0.27

from council import OpenAILLM
from council.llm.llm_function import LLMFunction
from council.llm.llm_response_parser import JSONResponseParser
from pydantic import Field

SYSTEM_PROMPT = """
Generate RPG character:

{response_template}
"""


# Field descriptions below feed to_response_template(), which turns them
# into the JSON skeleton the model is asked to fill in.
class RPGCharacterFromJSON(JSONResponseParser):
    character_class: Literal["Warrior", "Mage", "Rogue", "Bard", "Tech Support"] = Field(
        ..., description="Character's class (Warrior, Mage, Rogue, Bard or Tech Support)"
    )
    name: str = Field(..., min_length=3, description="Character's name")
    description: str = Field(..., min_length=50, description="Character's tragic backstory, 50 chars minimum")
    health: int = Field(..., ge=1, le=100, description="Character's health, integer, from 1 to 100 points")


os.environ.update(
    {
        "OPENAI_API_KEY": "sk-YOUR-KEY-HERE",
        "OPENAI_LLM_MODEL": "gpt-4o-mini-2024-07-18",
    }
)
llm = OpenAILLM.from_env()

# Bake the generated response template into the system prompt up front.
response_template = RPGCharacterFromJSON.to_response_template()
llm_function: LLMFunction[RPGCharacterFromJSON] = LLMFunction(
    llm,
    RPGCharacterFromJSON.from_response,
    SYSTEM_PROMPT.format(response_template=response_template),
)

character = llm_function.execute(
    user_message="Create some strong warrior",
    response_format={"type": "json_object"}  # using OpenAI's json mode
)
print(type(character))
print(f"{character.name}, {character.character_class} ({character.health}/100 hp)")
print(character.description)