Class peagen.core._llm.GenericLLM

GenericLLM()

A generic class that provides a unified interface to all supported LLM providers. It dynamically loads and instantiates the appropriate LLM class based on the specified provider.

Supported providers include:

- OpenAI/O1 (o1, openai)
- Anthropic (anthropic)
- Cohere (cohere)
- DeepInfra (deepinfra)
- LlamaCpp (llamacpp)
- Groq (groq)
- Gemini/Google (gemini, google)
- Mistral (mistral)
- DeepSeek (deepseek)
- AI21Studio (ai21studio)
- Hyperbolic (hyperbolic)
- Cerebras (cerebras)
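
A minimal usage sketch (the provider, key, and model names here are illustrative, not defaults):

    from peagen.core._llm import GenericLLM

    llm = GenericLLM().get_llm(
        provider="openai",
        api_key="sk-...",      # optional; falls back to .peagen.toml, then OPENAI_API_KEY
        model_name="gpt-4o",   # assumed model id; forwarded to the provider class as `name`
    )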

Source code in peagen/core/_llm.py
def __init__(self):
    self._llm_instance = None

get_llm

get_llm(
    provider,
    api_key=None,
    model_name=None,
    timeout=1200.0,
    **kwargs,
)

Creates and returns an instance of the specified LLM provider.

PARAMETER    TYPE                             DEFAULT    DESCRIPTION
provider     str                              required   The name of the LLM provider (e.g., 'openai', 'anthropic', 'deepinfra')
api_key      Optional[Union[str, SecretStr]]  None       API key for the provider
model_name   Optional[str]                    None       The specific model to use
timeout      Union[int, float]                1200.0     Request timeout in seconds
**kwargs     -                                {}         Additional arguments to pass to the LLM constructor
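
The timeout and any extra keyword arguments are forwarded unchanged to the provider class constructor. A short sketch; the max_tokens argument is hypothetical and depends on what the underlying provider class accepts:

    llm = GenericLLM().get_llm(
        provider="groq",
        timeout=300.0,     # overrides the 1200-second default
        max_tokens=512,    # hypothetical extra kwarg, passed through via **kwargs
    )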

RETURNS      DESCRIPTION
LLMBase      An instance of the requested LLM

RAISES       DESCRIPTION
ValueError   If the provider is not supported or no API key is found
ImportError  If the provider module cannot be imported
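
When api_key is omitted, get_llm resolves the key in order: the api_key argument, the [llm.<provider>] section of the nearest .peagen.toml (searched upward from the current working directory), then the <PROVIDER>_API_KEY environment variable. A sketch of the TOML section the lookup reads:

    [llm.anthropic]
    API_KEY = "sk-ant-..."   # the lowercase spelling `api_key` is also accepted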

Source code in peagen/core/_llm.py
def get_llm(
    self,
    provider: str,
    api_key: Optional[Union[str, SecretStr]] = None,
    model_name: Optional[str] = None,
    timeout: Union[int, float] = 1200.0,
    **kwargs,
) -> LLMBase:
    """
    Creates and returns an instance of the specified LLM provider.

    Args:
        provider: The name of the LLM provider (e.g., 'openai', 'anthropic', 'deepinfra')
        api_key: API key for the provider
        model_name: The specific model to use
        timeout: Request timeout in seconds
        **kwargs: Additional arguments to pass to the LLM constructor

    Returns:
        An instance of the requested LLM

    Raises:
        ValueError: If the provider is not supported or no API key is found
        ImportError: If the provider module cannot be imported
    """

    provider = provider.lower()
    if provider in self._providers:
        class_name = self._providers[provider]
    else:
        raise ValueError(
            f"Unsupported LLM provider: {provider}. "
            f"Supported providers: {list(self._providers.keys())}"
        )

    # 1️⃣ CLI-provided api_key stays
    # 2️⃣ Try .peagen.toml for provider-specific API key
    if api_key is None:
        toml_path = None
        for folder in [Path.cwd(), *Path.cwd().parents]:
            candidate = folder / ".peagen.toml"
            if candidate.is_file():
                toml_path = candidate
                break
        if toml_path:
            try:
                with toml_path.open("rb") as f:
                    toml_data = tomllib.load(f)
                llm_section = toml_data.get("llm", {})
                provider_section = llm_section.get(provider, {}) or llm_section.get(
                    provider.lower(), {}
                )
                toml_api_key = provider_section.get(
                    "API_KEY"
                ) or provider_section.get("api_key")
                if toml_api_key:
                    api_key = toml_api_key
            except Exception:
                pass

    # 3️⃣ Fallback to environment variable
    if api_key is None:
        env_var = f"{provider.upper()}_API_KEY"
        api_key = os.environ.get(env_var)

    # 4️⃣ Error if no API key found
    if api_key is None:
        raise ValueError(
            f"No API key provided for {provider}. "
            f"Please provide it via --api-key, .peagen.toml [llm.{provider}].API_KEY, or set the {env_var} environment variable."
        )

    # Dynamically import provider module
    try:
        module = importlib.import_module(f"swarmauri_standard.llms.{class_name}")
    except ImportError:
        try:
            module = importlib.import_module(f"swarmauri.llms.{class_name}")
        except ImportError:
            raise ImportError(
                f"Could not import {class_name} from any known module path"
            )

    llm_class = getattr(module, class_name)

    # Prepare initialization arguments
    init_args = {"timeout": timeout, **kwargs}
    if model_name:
        init_args["name"] = model_name
    if api_key:
        init_args["api_key"] = api_key

    # Create and return the LLM instance
    self._llm_instance = llm_class(**init_args)
    return self._llm_instance
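
A sketch of the environment-variable fallback and of the ValueError raised for an unknown provider (key and provider values are illustrative):

    import os

    from peagen.core._llm import GenericLLM

    os.environ["MISTRAL_API_KEY"] = "..."           # step 3 fallback: <PROVIDER>_API_KEY
    llm = GenericLLM().get_llm(provider="mistral")

    try:
        GenericLLM().get_llm(provider="unknown")
    except ValueError as exc:
        print(exc)  # names the unsupported provider and lists the supported keys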