Source code for pipecat.services.groq.llm

#
# Copyright (c) 2024–2025, Daily
#
# SPDX-License-Identifier: BSD 2-Clause License
#

from loguru import logger

from pipecat.services.openai.llm import OpenAILLMService


class GroqLLMService(OpenAILLMService):
    """A service for interacting with Groq's API using the OpenAI-compatible interface.

    This service extends OpenAILLMService to connect to Groq's API endpoint while
    maintaining full compatibility with OpenAI's interface and functionality.

    Args:
        api_key (str): The API key for accessing Groq's API.
        base_url (str, optional): The base URL for Groq's API. Defaults to
            "https://api.groq.com/openai/v1".
        model (str, optional): The model identifier to use. Defaults to
            "llama-3.3-70b-versatile".
        **kwargs: Additional keyword arguments passed to OpenAILLMService.
    """

    def __init__(
        self,
        *,
        api_key: str,
        base_url: str = "https://api.groq.com/openai/v1",
        model: str = "llama-3.3-70b-versatile",
        **kwargs,
    ):
        super().__init__(api_key=api_key, base_url=base_url, model=model, **kwargs)

    def create_client(self, api_key=None, base_url=None, **kwargs):
        """Create an OpenAI-compatible client for the Groq API endpoint."""
        logger.debug(f"Creating Groq client with api {base_url}")
        return super().create_client(api_key, base_url, **kwargs)
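
# Example usage (a minimal sketch, not part of this module): constructing the
# service against Groq's OpenAI-compatible endpoint. Reading the key from a
# GROQ_API_KEY environment variable is an assumption for illustration; any
# other argument accepted by OpenAILLMService can be passed through **kwargs.
#
#   import os
#
#   llm = GroqLLMService(
#       api_key=os.environ["GROQ_API_KEY"],
#       model="llama-3.3-70b-versatile",
#   )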