class DeepSeekModel(OpenAICompatibleModel):
    r"""DeepSeek API in a unified OpenAICompatibleModel interface.

    Args:
        model_type (Union[ModelType, str]): Model for which a backend is
            created.
        model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
            that will be fed into :obj:`openai.ChatCompletion.create()`. If
            :obj:`None`, :obj:`DeepSeekConfig().as_dict()` will be used.
            (default: :obj:`None`)
        api_key (Optional[str], optional): The API key for authenticating
            with the DeepSeek service. (default: :obj:`None`)
        url (Optional[str], optional): The url to the DeepSeek service.
            (default: :obj:`https://api.deepseek.com`)
        token_counter (Optional[BaseTokenCounter], optional): Token counter
            to use for the model. If not provided, :obj:`OpenAITokenCounter`
            will be used. (default: :obj:`None`)
        timeout (Optional[float], optional): The timeout value in seconds
            for API calls. (default: :obj:`None`)
        max_retries (int, optional): Maximum number of retries for API
            calls. (default: :obj:`3`)
        **kwargs (Any): Additional arguments to pass to the client
            initialization.
    """

    def __init__(
self,
model_type: Union[ModelType, str],
model_config_dict: Optional[Dict[str, Any]] = None,
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
timeout: Optional[float] = None,
max_retries: int = 3,
**kwargs: Any
):
def _prepare_request(
self,
messages: List[OpenAIMessage],
response_format: Optional[Type[BaseModel]] = None,
tools: Optional[List[Dict[str, Any]]] = None
):
def _post_handle_response(self, response: ChatCompletion):
        """Handle the response from the DeepSeek API, stripping reasoning
        content wrapped in ``<think>`` tags at the beginning.
        """
def _run(
self,
messages: List[OpenAIMessage],
response_format: Optional[Type[BaseModel]] = None,
tools: Optional[List[Dict[str, Any]]] = None
):
        """Runs inference of DeepSeek chat completion.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                :obj:`ChatCompletion` in the non-stream mode, or
                :obj:`Stream[ChatCompletionChunk]` in the stream mode.
        """
def check_model_config(self):