GroqLLMConfiguration#

class council.llm.GroqLLMConfiguration(model: str, api_key: str)[source]#

Bases: LLMConfigurationBase

__init__(model: str, api_key: str) → None[source]#

Initialize a new GroqLLMConfiguration instance; a usage sketch follows the parameter list below.

Parameters:
  • api_key (str) – the Groq API key

  • model (str) – the Groq model name

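A minimal construction sketch, assuming the model name and the GROQ_API_KEY environment variable below; both are illustrative choices, not values prescribed by this class.

import os

from council.llm import GroqLLMConfiguration

# Build a Groq configuration from an explicit model name and an API key
# read from the environment; both concrete values here are assumed examples.
config = GroqLLMConfiguration(
    model="llama-3.1-8b-instant",
    api_key=os.environ["GROQ_API_KEY"],
)
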
property model: Parameter[str]#

Groq model name

property api_key: Parameter[str]#

Groq API Key

property frequency_penalty: Parameter[float]#

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model’s likelihood to repeat the same line verbatim.

property max_tokens: Parameter[int]#

Maximum number of tokens to generate.

property presence_penalty: Parameter[float]#

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model’s likelihood to talk about new topics.

property seed: Parameter[int]#

Random seed for generation.

property stop: Parameter[str]#

Stop sequence.

property temperature: Parameter[float]#

What sampling temperature to use, between 0 and 2.

property top_p: Parameter[float]#

Nucleus sampling probability threshold, between 0 and 1.
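
The properties above expose the configured values as Parameter objects. A reading sketch, continuing from the construction example above and assuming the Parameter wrapper exposes its underlying value through a value attribute (an assumption not documented on this page):

# Inspect configured values; the `.value` accessor on Parameter is assumed.
print(config.model.value)        # e.g. "llama-3.1-8b-instant"
print(config.temperature.value)  # sampling temperature, if one was set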

params_to_args() → Dict[str, Any][source]#

Convert the configured parameters to an options dictionary.
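
A hedged usage sketch, continuing from the construction example above; the exact keys in the returned dictionary and the shape of the request payload are assumptions based on the properties listed on this page, not documented behavior.

# Collect the configured generation options as a plain dict, e.g.
# {"temperature": 0.2, "max_tokens": 1024} for the parameters that were set.
options = config.params_to_args()

# Merge into an illustrative chat-completions style payload (assumed shape;
# the `.value` accessor on Parameter is also an assumption).
payload = {
    "model": config.model.value,
    "messages": [{"role": "user", "content": "Hello"}],
    **options,
}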