LLMs#

ragbits.core.llms.LLM #

LLM(model_name: str, default_options: LLMClientOptionsT | None = None)

Bases: ConfigurableComponent[LLMClientOptionsT], ABC

Abstract class for interacting with Large Language Models.

Constructs a new LLM instance.

PARAMETER DESCRIPTION
model_name

Name of the model to be used.

TYPE: str

default_options

Default options to be used.

TYPE: LLMClientOptionsT | None DEFAULT: None

RAISES DESCRIPTION
TypeError

If the subclass is missing the 'options_cls' attribute.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
def __init__(self, model_name: str, default_options: LLMClientOptionsT | None = None) -> None:
    """
    Constructs a new LLM instance.

    Args:
        model_name: Name of the model to be used.
        default_options: Default options to be used.

    Raises:
        TypeError: If the subclass is missing the 'options_cls' attribute.
    """
    super().__init__(default_options=default_options)
    self.model_name = model_name

default_options instance-attribute #

default_options: OptionsT = default_options or options_cls()

options_cls instance-attribute #

options_cls: type[LLMClientOptionsT]

default_module class-attribute instance-attribute #

default_module: ClassVar = llms

configuration_key class-attribute instance-attribute #

configuration_key: ClassVar = 'llm'

model_name instance-attribute #

model_name = model_name

client abstractmethod cached property #

client: LLMClient

Client for the LLM.

subclass_from_config classmethod #

subclass_from_config(config: ObjectContructionConfig) -> Self

Initializes the class with the provided configuration. May return a subclass of the class, if requested by the configuration.

PARAMETER DESCRIPTION
config

A model containing configuration details for the class.

TYPE: ObjectContructionConfig

RETURNS DESCRIPTION
Self

An instance of the class initialized with the provided configuration.

RAISES DESCRIPTION
InvalidConfigError

The class can't be found or is not a subclass of the current class.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_config(cls, config: ObjectContructionConfig) -> Self:
    """
    Initializes the class with the provided configuration. May return a subclass of the class,
    if requested by the configuration.

    Args:
        config: A model containing configuration details for the class.

    Returns:
        An instance of the class initialized with the provided configuration.

    Raises:
        InvalidConfigError: The class can't be found or is not a subclass of the current class.
    """
    subclass = import_by_path(config.type, cls.default_module)
    if not issubclass(subclass, cls):
        raise InvalidConfigError(f"{subclass} is not a subclass of {cls}")

    return subclass.from_config(config.config)
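
For example, the following sketch (the "type" path and model name are illustrative) builds a LiteLLM instance from a declarative configuration:

from ragbits.core.llms import LLM
from ragbits.core.utils.config_handling import ObjectContructionConfig

# "type" is resolved by import_by_path (relative to the default module when it
# is not a full "module:Class" path); "config" is forwarded to from_config().
llm = LLM.subclass_from_config(
    ObjectContructionConfig.model_validate(
        {
            "type": "ragbits.core.llms.litellm:LiteLLM",
            "config": {"model_name": "gpt-4o-mini"},
        }
    )
)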

subclass_from_factory classmethod #

subclass_from_factory(factory_path: str) -> Self

Creates the class using the provided factory function. May return a subclass of the class, if requested by the factory.

PARAMETER DESCRIPTION
factory_path

A string representing the path to the factory function in the format of "module.submodule:factory_name".

TYPE: str

RETURNS DESCRIPTION
Self

An instance of the class initialized with the provided factory function.

RAISES DESCRIPTION
InvalidConfigError

The factory can't be found or the object returned is not a subclass of the current class.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_factory(cls, factory_path: str) -> Self:
    """
    Creates the class using the provided factory function. May return a subclass of the class,
    if requested by the factory.

    Args:
        factory_path: A string representing the path to the factory function
            in the format of "module.submodule:factory_name".

    Returns:
        An instance of the class initialized with the provided factory function.

    Raises:
        InvalidConfigError: The factory can't be found or the object returned
            is not a subclass of the current class.
    """
    factory = import_by_path(factory_path, cls.default_module)
    obj = factory()
    if not isinstance(obj, cls):
        raise InvalidConfigError(f"The object returned by factory {factory_path} is not an instance of {cls}")
    return obj
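
A minimal sketch of the factory path, assuming a hypothetical my_project.llms module importable at runtime:

from ragbits.core.llms import LLM
from ragbits.core.llms.litellm import LiteLLM

# Hypothetical factory; in a real project it would live in my_project/llms.py.
def default_llm() -> LiteLLM:
    return LiteLLM(model_name="gpt-4o-mini")

# The factory is looked up by its "module.submodule:factory_name" path and its
# return value must be an instance of the class the method is called on.
llm = LLM.subclass_from_factory("my_project.llms:default_llm")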

subclass_from_defaults classmethod #

subclass_from_defaults(defaults: CoreConfig, factory_path_override: str | None = None, yaml_path_override: Path | None = None) -> Self

Tries to create an instance by looking at the default configuration file and the default factory function. Takes optional overrides for both; the overrides take higher precedence, with the YAML override checked first.

PARAMETER DESCRIPTION
defaults

The CoreConfig instance containing default factory and configuration details.

TYPE: CoreConfig

factory_path_override

A string representing the path to the factory function in the format of "module.submodule:factory_name".

TYPE: str | None DEFAULT: None

yaml_path_override

A string representing the path to the YAML file containing the Ragstack instance configuration.

TYPE: Path | None DEFAULT: None

RAISES DESCRIPTION
InvalidConfigError

If the default factory or configuration can't be found.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_defaults(
    cls, defaults: CoreConfig, factory_path_override: str | None = None, yaml_path_override: Path | None = None
) -> Self:
    """
    Tries to create an instance by looking at default configuration file, and default factory function.
    Takes optional overrides for both, which takes a higher precedence.

    Args:
        defaults: The CoreConfig instance containing default factory and configuration details.
        factory_path_override: A string representing the path to the factory function
            in the format of "module.submodule:factory_name".
        yaml_path_override: A string representing the path to the YAML file containing
            the Ragstack instance configuration.

    Raises:
        InvalidConfigError: If the default factory or configuration can't be found.
    """
    if yaml_path_override:
        config = get_config_from_yaml(yaml_path_override)
        if type_config := config.get(cls.configuration_key):
            return cls.subclass_from_config(ObjectContructionConfig.model_validate(type_config))

    if factory_path_override:
        return cls.subclass_from_factory(factory_path_override)

    if default_factory := defaults.default_factories.get(cls.configuration_key):
        return cls.subclass_from_factory(default_factory)

    if default_config := defaults.default_instances_config.get(cls.configuration_key):
        return cls.subclass_from_config(ObjectContructionConfig.model_validate(default_config))

    raise NoDefaultConfigError(f"Could not find default factory or configuration for {cls.configuration_key}")
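
As the source shows, the lookup order is: explicit YAML override, explicit factory override, then the default factory and default instance configuration from CoreConfig, with NoDefaultConfigError raised when nothing matches. In the YAML file, the component's entry lives under its configuration_key ("llm" here) as a type/config mapping. A hedged sketch (the core_config import location is an assumption about where ragbits exposes its loaded defaults):

from pathlib import Path

from ragbits.core.config import core_config  # assumed location of the loaded CoreConfig
from ragbits.core.llms import LLM

# The YAML override is consulted first; if it lacks an "llm" entry, the
# factory override is tried, then the defaults carried by core_config.
llm = LLM.subclass_from_defaults(
    defaults=core_config,
    yaml_path_override=Path("instances.yaml"),
    factory_path_override="my_project.llms:default_llm",
)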

from_config classmethod #

from_config(config: dict[str, Any]) -> ConfigurableComponent

Initializes the class with the provided configuration.

PARAMETER DESCRIPTION
config

A dictionary containing configuration details for the class.

TYPE: dict[str, Any]

RETURNS DESCRIPTION
ConfigurableComponent

An instance of the class initialized with the provided configuration.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def from_config(cls, config: dict[str, Any]) -> ConfigurableComponent:
    """
    Initializes the class with the provided configuration.

    Args:
        config: A dictionary containing configuration details for the class.

    Returns:
        An instance of the class initialized with the provided configuration.
    """
    default_options = config.pop("default_options", None)
    options = cls.options_cls(**default_options) if default_options else None
    return cls(**config, default_options=options)
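
Since "default_options" is popped from the dictionary and validated with options_cls, the sketch below is equivalent to constructing the instance directly (the temperature field is an assumption about LiteLLMOptions):

from ragbits.core.llms.litellm import LiteLLM

# The remaining key ("model_name") is passed to the constructor as a keyword
# argument; "default_options" becomes LiteLLMOptions(temperature=0.0).
llm = LiteLLM.from_config(
    {
        "model_name": "gpt-4o-mini",
        "default_options": {"temperature": 0.0},  # assumed LiteLLMOptions field
    }
)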

count_tokens #

count_tokens(prompt: BasePrompt) -> int

Counts tokens in the prompt.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and response parsing configuration.

TYPE: BasePrompt

RETURNS DESCRIPTION
int

Number of tokens in the prompt. Note that this base implementation only approximates the count as the total number of characters across messages; subclasses such as LocalLLM and LiteLLM override it with model-aware tokenization.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
def count_tokens(self, prompt: BasePrompt) -> int:  # noqa: PLR6301
    """
    Counts tokens in the prompt.

    Args:
        prompt: Formatted prompt template with conversation and response parsing configuration.

    Returns:
        Number of tokens in the prompt.
    """
    return sum(len(message["content"]) for message in prompt.chat)

generate_raw async #

generate_raw(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> str

Prepares and sends a prompt to the LLM and returns the raw response (without parsing).

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
str

Raw text response from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate_raw(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> str:
    """
    Prepares and sends a prompt to the LLM and returns the raw response (without parsing).

    Args:
        prompt: Formatted prompt template with conversation.
        options: Options to use for the LLM client.

    Returns:
        Raw text response from LLM.
    """
    merged_options = (self.default_options | options) if options else self.default_options
    response = await self.client.call(
        conversation=self._format_chat_for_llm(prompt),
        options=merged_options,
        json_mode=prompt.json_mode,
        output_schema=prompt.output_schema(),
    )

    return response
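
Note the merge rule: per-call options overlay the instance defaults via the | operator, field by field. A small sketch (the option fields named are assumptions about LiteLLMOptions):

from ragbits.core.llms.litellm import LiteLLMOptions  # assumed export

defaults = LiteLLMOptions(temperature=0.0, max_tokens=128)
override = LiteLLMOptions(max_tokens=64)

# Where both sides set a field, the right-hand side wins, so this merge
# yields temperature=0.0 and max_tokens=64.
merged = defaults | override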

generate async #

generate(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> OutputT

Prepares and sends a prompt to the LLM and returns the response parsed to the prompt's output type (if a parser is available).

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and optional response parsing configuration.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
OutputT

Text response from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> OutputT:
    """
    Prepares and sends a prompt to the LLM and returns response parsed to the
    output type of the prompt (if available).

    Args:
        prompt: Formatted prompt template with conversation and optional response parsing configuration.
        options: Options to use for the LLM client.

    Returns:
        Text response from LLM.
    """
    response = await self.generate_raw(prompt, options=options)

    if isinstance(prompt, BasePromptWithParser):
        return prompt.parse_response(response)

    return cast(OutputT, response)
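
An end-to-end sketch, assuming the Prompt helper from ragbits.core.prompt (a BasePrompt implementation with Jinja-templated messages); the model name and exact import paths are illustrative:

import asyncio

from pydantic import BaseModel

from ragbits.core.llms.litellm import LiteLLM
from ragbits.core.prompt import Prompt  # assumed location of the Prompt helper

class QuestionInput(BaseModel):
    country: str

class QuestionPrompt(Prompt[QuestionInput, str]):
    system_prompt = "Answer with a single word."
    user_prompt = "What is the capital of {{ country }}?"

async def main() -> None:
    llm = LiteLLM(model_name="gpt-4o-mini")
    answer = await llm.generate(QuestionPrompt(QuestionInput(country="France")))
    print(answer)  # e.g. "Paris"

asyncio.run(main())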

generate_streaming async #

generate_streaming(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> AsyncGenerator[str, None]

Prepares and sends a prompt to the LLM and streams the results.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
AsyncGenerator[str, None]

Response stream from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate_streaming(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> AsyncGenerator[str, None]:
    """
    Prepares and sends a prompt to the LLM and streams the results.

    Args:
        prompt: Formatted prompt template with conversation.
        options: Options to use for the LLM client.

    Returns:
        Response stream from LLM.
    """
    merged_options = (self.default_options | options) if options else self.default_options
    response = await self.client.call_streaming(
        conversation=self._format_chat_for_llm(prompt),
        options=merged_options,
        json_mode=prompt.json_mode,
        output_schema=prompt.output_schema(),
    )
    async for text_piece in response:
        yield text_piece
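
Consuming the stream is plain async iteration. A sketch reusing the illustrative QuestionPrompt and QuestionInput defined in the generate example above:

from ragbits.core.llms.litellm import LiteLLM

async def stream_answer() -> None:
    llm = LiteLLM(model_name="gpt-4o-mini")
    prompt = QuestionPrompt(QuestionInput(country="France"))
    # Text pieces arrive as the model produces them.
    async for text_piece in llm.generate_streaming(prompt):
        print(text_piece, end="", flush=True)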

ragbits.core.llms.local.LocalLLM #

LocalLLM(model_name: str, default_options: LocalLLMOptions | None = None, *, api_key: str | None = None)

Bases: LLM[LocalLLMOptions]

Class for interacting with any LLM available on Hugging Face.

Constructs a new local LLM instance.

PARAMETER DESCRIPTION
model_name

Name of the model to use. This should be a causal language model loadable with Hugging Face's AutoModelForCausalLM.

TYPE: str

default_options

Default options for the LLM.

TYPE: LocalLLMOptions | None DEFAULT: None

api_key

The API key for Hugging Face authentication.

TYPE: str | None DEFAULT: None

RAISES DESCRIPTION
ImportError

If the 'local' extra requirements are not installed.

Source code in packages/ragbits-core/src/ragbits/core/llms/local.py
def __init__(
    self,
    model_name: str,
    default_options: LocalLLMOptions | None = None,
    *,
    api_key: str | None = None,
) -> None:
    """
    Constructs a new local LLM instance.

    Args:
        model_name: Name of the model to use. This should be a model from the CausalLM class.
        default_options: Default options for the LLM.
        api_key: The API key for Hugging Face authentication.

    Raises:
        ImportError: If the 'local' extra requirements are not installed.
    """
    if not HAS_LOCAL_LLM:
        raise ImportError("You need to install the 'local' extra requirements to use local LLM models")

    super().__init__(model_name, default_options)
    self.tokenizer = AutoTokenizer.from_pretrained(model_name, token=api_key)
    self.api_key = api_key
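
A hedged instantiation sketch; the checkpoint name is illustrative, the 'local' extra (which pulls in transformers and torch) must be installed, and api_key is only needed for gated models:

from ragbits.core.llms.local import LocalLLM

# Any causal-LM checkpoint from the Hugging Face Hub should work here.
llm = LocalLLM(
    model_name="HuggingFaceTB/SmolLM2-135M-Instruct",  # illustrative model id
    api_key=None,  # set a HF access token for gated checkpoints
)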

default_module class-attribute instance-attribute #

default_module: ClassVar = llms

configuration_key class-attribute instance-attribute #

configuration_key: ClassVar = 'llm'

default_options instance-attribute #

default_options: OptionsT = default_options or options_cls()

model_name instance-attribute #

model_name = model_name

options_cls class-attribute instance-attribute #

options_cls = LocalLLMOptions

tokenizer instance-attribute #

tokenizer = from_pretrained(model_name, token=api_key)

api_key instance-attribute #

api_key = api_key

client cached property #

client: LocalLLMClient

Client for the LLM.

RETURNS DESCRIPTION
LocalLLMClient

The client used to interact with the LLM.

subclass_from_config classmethod #

subclass_from_config(config: ObjectContructionConfig) -> Self

Initializes the class with the provided configuration. May return a subclass of the class, if requested by the configuration.

PARAMETER DESCRIPTION
config

A model containing configuration details for the class.

TYPE: ObjectContructionConfig

RETURNS DESCRIPTION
Self

An instance of the class initialized with the provided configuration.

RAISES DESCRIPTION
InvalidConfigError

The class can't be found or is not a subclass of the current class.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_config(cls, config: ObjectContructionConfig) -> Self:
    """
    Initializes the class with the provided configuration. May return a subclass of the class,
    if requested by the configuration.

    Args:
        config: A model containing configuration details for the class.

    Returns:
        An instance of the class initialized with the provided configuration.

    Raises:
        InvalidConfigError: The class can't be found or is not a subclass of the current class.
    """
    subclass = import_by_path(config.type, cls.default_module)
    if not issubclass(subclass, cls):
        raise InvalidConfigError(f"{subclass} is not a subclass of {cls}")

    return subclass.from_config(config.config)

subclass_from_factory classmethod #

subclass_from_factory(factory_path: str) -> Self

Creates the class using the provided factory function. May return a subclass of the class, if requested by the factory.

PARAMETER DESCRIPTION
factory_path

A string representing the path to the factory function in the format of "module.submodule:factory_name".

TYPE: str

RETURNS DESCRIPTION
Self

An instance of the class initialized with the provided factory function.

RAISES DESCRIPTION
InvalidConfigError

The factory can't be found or the object returned is not a subclass of the current class.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_factory(cls, factory_path: str) -> Self:
    """
    Creates the class using the provided factory function. May return a subclass of the class,
    if requested by the factory.

    Args:
        factory_path: A string representing the path to the factory function
            in the format of "module.submodule:factory_name".

    Returns:
        An instance of the class initialized with the provided factory function.

    Raises:
        InvalidConfigError: The factory can't be found or the object returned
            is not a subclass of the current class.
    """
    factory = import_by_path(factory_path, cls.default_module)
    obj = factory()
    if not isinstance(obj, cls):
        raise InvalidConfigError(f"The object returned by factory {factory_path} is not an instance of {cls}")
    return obj

subclass_from_defaults classmethod #

subclass_from_defaults(defaults: CoreConfig, factory_path_override: str | None = None, yaml_path_override: Path | None = None) -> Self

Tries to create an instance by looking at the default configuration file and the default factory function. Takes optional overrides for both; the overrides take higher precedence, with the YAML override checked first.

PARAMETER DESCRIPTION
defaults

The CoreConfig instance containing default factory and configuration details.

TYPE: CoreConfig

factory_path_override

A string representing the path to the factory function in the format of "module.submodule:factory_name".

TYPE: str | None DEFAULT: None

yaml_path_override

A string representing the path to the YAML file containing the Ragstack instance configuration.

TYPE: Path | None DEFAULT: None

RAISES DESCRIPTION
InvalidConfigError

If the default factory or configuration can't be found.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_defaults(
    cls, defaults: CoreConfig, factory_path_override: str | None = None, yaml_path_override: Path | None = None
) -> Self:
    """
    Tries to create an instance by looking at default configuration file, and default factory function.
    Takes optional overrides for both, which takes a higher precedence.

    Args:
        defaults: The CoreConfig instance containing default factory and configuration details.
        factory_path_override: A string representing the path to the factory function
            in the format of "module.submodule:factory_name".
        yaml_path_override: A string representing the path to the YAML file containing
            the Ragstack instance configuration.

    Raises:
        InvalidConfigError: If the default factory or configuration can't be found.
    """
    if yaml_path_override:
        config = get_config_from_yaml(yaml_path_override)
        if type_config := config.get(cls.configuration_key):
            return cls.subclass_from_config(ObjectContructionConfig.model_validate(type_config))

    if factory_path_override:
        return cls.subclass_from_factory(factory_path_override)

    if default_factory := defaults.default_factories.get(cls.configuration_key):
        return cls.subclass_from_factory(default_factory)

    if default_config := defaults.default_instances_config.get(cls.configuration_key):
        return cls.subclass_from_config(ObjectContructionConfig.model_validate(default_config))

    raise NoDefaultConfigError(f"Could not find default factory or configuration for {cls.configuration_key}")

from_config classmethod #

from_config(config: dict[str, Any]) -> ConfigurableComponent

Initializes the class with the provided configuration.

PARAMETER DESCRIPTION
config

A dictionary containing configuration details for the class.

TYPE: dict[str, Any]

RETURNS DESCRIPTION
ConfigurableComponent

An instance of the class initialized with the provided configuration.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def from_config(cls, config: dict[str, Any]) -> ConfigurableComponent:
    """
    Initializes the class with the provided configuration.

    Args:
        config: A dictionary containing configuration details for the class.

    Returns:
        An instance of the class initialized with the provided configuration.
    """
    default_options = config.pop("default_options", None)
    options = cls.options_cls(**default_options) if default_options else None
    return cls(**config, default_options=options)

generate_raw async #

generate_raw(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> str

Prepares and sends a prompt to the LLM and returns the raw response (without parsing).

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
str

Raw text response from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate_raw(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> str:
    """
    Prepares and sends a prompt to the LLM and returns the raw response (without parsing).

    Args:
        prompt: Formatted prompt template with conversation.
        options: Options to use for the LLM client.

    Returns:
        Raw text response from LLM.
    """
    merged_options = (self.default_options | options) if options else self.default_options
    response = await self.client.call(
        conversation=self._format_chat_for_llm(prompt),
        options=merged_options,
        json_mode=prompt.json_mode,
        output_schema=prompt.output_schema(),
    )

    return response

generate async #

generate(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> OutputT

Prepares and sends a prompt to the LLM and returns the response parsed to the prompt's output type (if a parser is available).

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and optional response parsing configuration.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
OutputT

Text response from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> OutputT:
    """
    Prepares and sends a prompt to the LLM and returns response parsed to the
    output type of the prompt (if available).

    Args:
        prompt: Formatted prompt template with conversation and optional response parsing configuration.
        options: Options to use for the LLM client.

    Returns:
        Text response from LLM.
    """
    response = await self.generate_raw(prompt, options=options)

    if isinstance(prompt, BasePromptWithParser):
        return prompt.parse_response(response)

    return cast(OutputT, response)

generate_streaming async #

generate_streaming(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> AsyncGenerator[str, None]

Prepares and sends a prompt to the LLM and streams the results.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
AsyncGenerator[str, None]

Response stream from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate_streaming(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> AsyncGenerator[str, None]:
    """
    Prepares and sends a prompt to the LLM and streams the results.

    Args:
        prompt: Formatted prompt template with conversation.
        options: Options to use for the LLM client.

    Returns:
        Response stream from LLM.
    """
    merged_options = (self.default_options | options) if options else self.default_options
    response = await self.client.call_streaming(
        conversation=self._format_chat_for_llm(prompt),
        options=merged_options,
        json_mode=prompt.json_mode,
        output_schema=prompt.output_schema(),
    )
    async for text_piece in response:
        yield text_piece

count_tokens #

count_tokens(prompt: BasePrompt) -> int

Counts tokens in the messages.

PARAMETER DESCRIPTION
prompt

Messages to count tokens for.

TYPE: BasePrompt

RETURNS DESCRIPTION
int

Number of tokens in the messages.

Source code in packages/ragbits-core/src/ragbits/core/llms/local.py
def count_tokens(self, prompt: BasePrompt) -> int:
    """
    Counts tokens in the messages.

    Args:
        prompt: Messages to count tokens for.

    Returns:
        Number of tokens in the messages.
    """
    input_ids = self.tokenizer.apply_chat_template(prompt.chat)
    return len(input_ids)
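
Because this runs the tokenizer's chat template, the count includes the special and control tokens the model would actually receive, unlike the character-count approximation in the base class. A sketch reusing the illustrative QuestionPrompt and QuestionInput from the generate example:

from ragbits.core.llms.local import LocalLLM

llm = LocalLLM(model_name="HuggingFaceTB/SmolLM2-135M-Instruct")  # illustrative
n_tokens = llm.count_tokens(QuestionPrompt(QuestionInput(country="France")))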

ragbits.core.llms.litellm.LiteLLM #

LiteLLM(model_name: str = 'gpt-3.5-turbo', default_options: LiteLLMOptions | None = None, *, base_url: str | None = None, api_key: str | None = None, api_version: str | None = None, use_structured_output: bool = False, router: Router | None = None)

Bases: LLM[LiteLLMOptions]

Class for interacting with any LLM supported by the LiteLLM API.

Constructs a new LiteLLM instance.

PARAMETER DESCRIPTION
model_name

Name of the LiteLLM-supported model to be used. Default is "gpt-3.5-turbo".

TYPE: str DEFAULT: 'gpt-3.5-turbo'

default_options

Default options to be used.

TYPE: LiteLLMOptions | None DEFAULT: None

base_url

Base URL of the LLM API.

TYPE: str | None DEFAULT: None

api_key

API key to be used. If not specified, an environment variable will be used; for more information, follow the instructions for your specific vendor in the LiteLLM documentation.

TYPE: str | None DEFAULT: None

api_version

API version to be used. If not specified, the default version will be used.

TYPE: str | None DEFAULT: None

use_structured_output

Whether to request a structured output from the model. Default is False. Can only be combined with models that support structured output.

TYPE: bool DEFAULT: False

router

Router to be used to route requests to different models.

TYPE: Router | None DEFAULT: None

Source code in packages/ragbits-core/src/ragbits/core/llms/litellm.py
def __init__(
    self,
    model_name: str = "gpt-3.5-turbo",
    default_options: LiteLLMOptions | None = None,
    *,
    base_url: str | None = None,
    api_key: str | None = None,
    api_version: str | None = None,
    use_structured_output: bool = False,
    router: litellm.Router | None = None,
) -> None:
    """
    Constructs a new LiteLLM instance.

    Args:
        model_name: Name of the [LiteLLM supported model](https://docs.litellm.ai/docs/providers) to be used.\
            Default is "gpt-3.5-turbo".
        default_options: Default options to be used.
        base_url: Base URL of the LLM API.
        api_key: API key to be used. If not specified, an environment variable will be used;
            for more information, follow the instructions for your specific vendor in the\
            [LiteLLM documentation](https://docs.litellm.ai/docs/providers).
        api_version: API version to be used. If not specified, the default version will be used.
        use_structured_output: Whether to request a
            [structured output](https://docs.litellm.ai/docs/completion/json_mode#pass-in-json_schema)
            from the model. Default is False. Can only be combined with models that support structured output.
        router: Router to be used to [route requests](https://docs.litellm.ai/docs/routing) to different models.
    """
    super().__init__(model_name, default_options)
    self.base_url = base_url
    self.api_key = api_key
    self.api_version = api_version
    self.use_structured_output = use_structured_output
    self.router = router
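
Two hedged instantiation sketches: a default OpenAI-backed instance, and one pointed at a self-hosted OpenAI-compatible server (model name, URL, and key are placeholders):

from ragbits.core.llms.litellm import LiteLLM

# Defaults: "gpt-3.5-turbo", with the API key read from the environment
# (OPENAI_API_KEY for OpenAI models).
llm = LiteLLM()

# OpenAI-compatible endpoint via LiteLLM's "openai/" provider prefix.
local_llm = LiteLLM(
    model_name="openai/meta-llama/Llama-3.1-8B-Instruct",  # placeholder
    base_url="http://localhost:8000/v1",
    api_key="not-needed-for-local-servers",
)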

default_module class-attribute instance-attribute #

default_module: ClassVar = llms

configuration_key class-attribute instance-attribute #

configuration_key: ClassVar = 'llm'

default_options instance-attribute #

default_options: OptionsT = default_options or options_cls()

model_name instance-attribute #

model_name = model_name

options_cls class-attribute instance-attribute #

options_cls = LiteLLMOptions

base_url instance-attribute #

base_url = base_url

api_key instance-attribute #

api_key = api_key

api_version instance-attribute #

api_version = api_version

use_structured_output instance-attribute #

use_structured_output = use_structured_output

router instance-attribute #

router = router

client cached property #

client: LiteLLMClient

Client for the LLM.

subclass_from_config classmethod #

subclass_from_config(config: ObjectContructionConfig) -> Self

Initializes the class with the provided configuration. May return a subclass of the class, if requested by the configuration.

PARAMETER DESCRIPTION
config

A model containing configuration details for the class.

TYPE: ObjectContructionConfig

RETURNS DESCRIPTION
Self

An instance of the class initialized with the provided configuration.

RAISES DESCRIPTION
InvalidConfigError

The class can't be found or is not a subclass of the current class.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_config(cls, config: ObjectContructionConfig) -> Self:
    """
    Initializes the class with the provided configuration. May return a subclass of the class,
    if requested by the configuration.

    Args:
        config: A model containing configuration details for the class.

    Returns:
        An instance of the class initialized with the provided configuration.

    Raises:
        InvalidConfigError: The class can't be found or is not a subclass of the current class.
    """
    subclass = import_by_path(config.type, cls.default_module)
    if not issubclass(subclass, cls):
        raise InvalidConfigError(f"{subclass} is not a subclass of {cls}")

    return subclass.from_config(config.config)

subclass_from_factory classmethod #

subclass_from_factory(factory_path: str) -> Self

Creates the class using the provided factory function. May return a subclass of the class, if requested by the factory.

PARAMETER DESCRIPTION
factory_path

A string representing the path to the factory function in the format of "module.submodule:factory_name".

TYPE: str

RETURNS DESCRIPTION
Self

An instance of the class initialized with the provided factory function.

RAISES DESCRIPTION
InvalidConfigError

The factory can't be found or the object returned is not a subclass of the current class.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_factory(cls, factory_path: str) -> Self:
    """
    Creates the class using the provided factory function. May return a subclass of the class,
    if requested by the factory.

    Args:
        factory_path: A string representing the path to the factory function
            in the format of "module.submodule:factory_name".

    Returns:
        An instance of the class initialized with the provided factory function.

    Raises:
        InvalidConfigError: The factory can't be found or the object returned
            is not a subclass of the current class.
    """
    factory = import_by_path(factory_path, cls.default_module)
    obj = factory()
    if not isinstance(obj, cls):
        raise InvalidConfigError(f"The object returned by factory {factory_path} is not an instance of {cls}")
    return obj

subclass_from_defaults classmethod #

subclass_from_defaults(defaults: CoreConfig, factory_path_override: str | None = None, yaml_path_override: Path | None = None) -> Self

Tries to create an instance by looking at the default configuration file and the default factory function. Takes optional overrides for both; the overrides take higher precedence, with the YAML override checked first.

PARAMETER DESCRIPTION
defaults

The CoreConfig instance containing default factory and configuration details.

TYPE: CoreConfig

factory_path_override

A string representing the path to the factory function in the format of "module.submodule:factory_name".

TYPE: str | None DEFAULT: None

yaml_path_override

A string representing the path to the YAML file containing the Ragstack instance configuration.

TYPE: Path | None DEFAULT: None

RAISES DESCRIPTION
InvalidConfigError

If the default factory or configuration can't be found.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def subclass_from_defaults(
    cls, defaults: CoreConfig, factory_path_override: str | None = None, yaml_path_override: Path | None = None
) -> Self:
    """
    Tries to create an instance by looking at default configuration file, and default factory function.
    Takes optional overrides for both, which takes a higher precedence.

    Args:
        defaults: The CoreConfig instance containing default factory and configuration details.
        factory_path_override: A string representing the path to the factory function
            in the format of "module.submodule:factory_name".
        yaml_path_override: A string representing the path to the YAML file containing
            the Ragstack instance configuration.

    Raises:
        InvalidConfigError: If the default factory or configuration can't be found.
    """
    if yaml_path_override:
        config = get_config_from_yaml(yaml_path_override)
        if type_config := config.get(cls.configuration_key):
            return cls.subclass_from_config(ObjectContructionConfig.model_validate(type_config))

    if factory_path_override:
        return cls.subclass_from_factory(factory_path_override)

    if default_factory := defaults.default_factories.get(cls.configuration_key):
        return cls.subclass_from_factory(default_factory)

    if default_config := defaults.default_instances_config.get(cls.configuration_key):
        return cls.subclass_from_config(ObjectContructionConfig.model_validate(default_config))

    raise NoDefaultConfigError(f"Could not find default factory or configuration for {cls.configuration_key}")

from_config classmethod #

from_config(config: dict[str, Any]) -> ConfigurableComponent

Initializes the class with the provided configuration.

PARAMETER DESCRIPTION
config

A dictionary containing configuration details for the class.

TYPE: dict[str, Any]

RETURNS DESCRIPTION
ConfigurableComponent

An instance of the class initialized with the provided configuration.

Source code in packages/ragbits-core/src/ragbits/core/utils/config_handling.py
@classmethod
def from_config(cls, config: dict[str, Any]) -> ConfigurableComponent:
    """
    Initializes the class with the provided configuration.

    Args:
        config: A dictionary containing configuration details for the class.

    Returns:
        An instance of the class initialized with the provided configuration.
    """
    default_options = config.pop("default_options", None)
    options = cls.options_cls(**default_options) if default_options else None
    return cls(**config, default_options=options)

generate_raw async #

generate_raw(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> str

Prepares and sends a prompt to the LLM and returns the raw response (without parsing).

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
str

Raw text response from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate_raw(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> str:
    """
    Prepares and sends a prompt to the LLM and returns the raw response (without parsing).

    Args:
        prompt: Formatted prompt template with conversation.
        options: Options to use for the LLM client.

    Returns:
        Raw text response from LLM.
    """
    merged_options = (self.default_options | options) if options else self.default_options
    response = await self.client.call(
        conversation=self._format_chat_for_llm(prompt),
        options=merged_options,
        json_mode=prompt.json_mode,
        output_schema=prompt.output_schema(),
    )

    return response

generate async #

generate(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> OutputT

Prepares and sends a prompt to the LLM and returns the response parsed to the prompt's output type (if a parser is available).

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and optional response parsing configuration.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
OutputT

Text response from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> OutputT:
    """
    Prepares and sends a prompt to the LLM and returns response parsed to the
    output type of the prompt (if available).

    Args:
        prompt: Formatted prompt template with conversation and optional response parsing configuration.
        options: Options to use for the LLM client.

    Returns:
        Text response from LLM.
    """
    response = await self.generate_raw(prompt, options=options)

    if isinstance(prompt, BasePromptWithParser):
        return prompt.parse_response(response)

    return cast(OutputT, response)

generate_streaming async #

generate_streaming(prompt: BasePrompt, *, options: LLMClientOptionsT | None = None) -> AsyncGenerator[str, None]

Prepares and sends a prompt to the LLM and streams the results.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation.

TYPE: BasePrompt

options

Options to use for the LLM client.

TYPE: LLMClientOptionsT | None DEFAULT: None

RETURNS DESCRIPTION
AsyncGenerator[str, None]

Response stream from LLM.

Source code in packages/ragbits-core/src/ragbits/core/llms/base.py
async def generate_streaming(
    self,
    prompt: BasePrompt,
    *,
    options: LLMClientOptionsT | None = None,
) -> AsyncGenerator[str, None]:
    """
    Prepares and sends a prompt to the LLM and streams the results.

    Args:
        prompt: Formatted prompt template with conversation.
        options: Options to use for the LLM client.

    Returns:
        Response stream from LLM.
    """
    merged_options = (self.default_options | options) if options else self.default_options
    response = await self.client.call_streaming(
        conversation=self._format_chat_for_llm(prompt),
        options=merged_options,
        json_mode=prompt.json_mode,
        output_schema=prompt.output_schema(),
    )
    async for text_piece in response:
        yield text_piece

count_tokens #

count_tokens(prompt: BasePrompt) -> int

Counts tokens in the prompt.

PARAMETER DESCRIPTION
prompt

Formatted prompt template with conversation and response parsing configuration.

TYPE: BasePrompt

RETURNS DESCRIPTION
int

Number of tokens in the prompt.

Source code in packages/ragbits-core/src/ragbits/core/llms/litellm.py
def count_tokens(self, prompt: BasePrompt) -> int:
    """
    Counts tokens in the prompt.

    Args:
        prompt: Formatted prompt template with conversation and response parsing configuration.

    Returns:
        Number of tokens in the prompt.
    """
    return sum(litellm.token_counter(model=self.model_name, text=message["content"]) for message in prompt.chat)
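
Unlike the base-class approximation, this delegates to litellm.token_counter, so the result reflects the tokenizer litellm associates with model_name; note that it sums per-message content counts and does not add chat-formatting overhead. A sketch reusing the illustrative QuestionPrompt and QuestionInput from the generate example:

from ragbits.core.llms.litellm import LiteLLM

llm = LiteLLM(model_name="gpt-4o-mini")
n_tokens = llm.count_tokens(QuestionPrompt(QuestionInput(country="France")))
print(n_tokens)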