import os

from deepsearcher.llm.base import BaseLLM


class OpenAILLM(BaseLLM):
    """
    OpenAI language model implementation.

    This class provides an interface to interact with OpenAI's language models
    through their API.

    Attributes:
        model (str): The OpenAI model identifier to use.
        client: The OpenAI client instance.
    """

    def __init__(self, model: str = "o1-mini", **kwargs):
        """
        Initialize an OpenAI language model client.

        Args:
            model (str, optional): The model identifier to use. Defaults to "o1-mini".
            **kwargs: Additional keyword arguments to pass to the OpenAI client.
                - api_key: OpenAI API key. If not provided, uses the OPENAI_API_KEY environment variable.
                - base_url: OpenAI API base URL. If not provided, uses the OPENAI_BASE_URL environment variable.
        """
        from openai import OpenAI

        self.model = model
        if "api_key" in kwargs:
            api_key = kwargs.pop("api_key")
        else:
            api_key = os.getenv("OPENAI_API_KEY")
        if "base_url" in kwargs:
            base_url = kwargs.pop("base_url")
        else:
            base_url = os.getenv("OPENAI_BASE_URL")
        self.client = OpenAI(api_key=api_key, base_url=base_url, **kwargs)

    def chat(self, messages: list[dict], stream_callback=None) -> str:
        """
        Send a chat request to the OpenAI model and return the complete streamed response.

        Args:
            messages (list[dict]): A list of message dictionaries, typically in the format
                [{"role": "system", "content": "..."}, {"role": "user", "content": "..."}].
            stream_callback (callable, optional): If provided, called with each streamed
                text chunk as it arrives. Defaults to None.

        Returns:
            str: The full response text assembled from the streamed chunks.
        """
        completion = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            stream=True,
        )
        response = ""
        for chunk in completion:
            stream_response = chunk.choices[0].delta.content
            if stream_response:
                print(stream_response, end="", flush=True)
                response += stream_response
                if stream_callback:
                    stream_callback(stream_response)
        print("\n")
        return response
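
# --- Minimal usage sketch (not part of the original module) ---
# Assumes the `openai` package is installed and the OPENAI_API_KEY environment
# variable is set; the model name "gpt-4o-mini" is an illustrative choice, not
# a project default.
if __name__ == "__main__":
    llm = OpenAILLM(model="gpt-4o-mini")
    answer = llm.chat(
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Explain what an LLM wrapper class is in one sentence."},
        ],
        # Optional: forward each streamed chunk somewhere besides stdout.
        stream_callback=lambda chunk: None,
    )
    print("Full response:", answer)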