class MistralModel(BaseModelBackend):
Mistral API in a unified BaseModelBackend interface.

Args:
    model_type (Union[ModelType, str]): Model for which a backend is
        created.
    model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
        that will be fed into :obj:`Mistral.chat.complete()`. If
        :obj:`None`, :obj:`MistralConfig().as_dict()` will be used.
        (default: :obj:`None`)
    api_key (Optional[str], optional): The API key for authenticating
        with the Mistral service. (default: :obj:`None`)
    url (Optional[str], optional): The URL to the Mistral service.
        (default: :obj:`None`)
    token_counter (Optional[BaseTokenCounter], optional): Token counter to
        use for the model. If not provided, :obj:`OpenAITokenCounter`
        will be used. (default: :obj:`None`)
    timeout (Optional[float], optional): The timeout value in seconds for
        API calls. (default: :obj:`None`)
    max_retries (int, optional): Maximum number of retries for API calls.
        (default: :obj:`3`)
    **kwargs (Any): Additional arguments to pass to the client
        initialization.

def __init__(
self,
model_type: Union[ModelType, str],
model_config_dict: Optional[Dict[str, Any]] = None,
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
timeout: Optional[float] = None,
max_retries: int = 3,
**kwargs: Any
):
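
# A minimal construction sketch. The import paths and the MistralConfig
# keyword used below are assumptions (they are not stated in this file);
# a plain model-name string works because model_type is typed
# Union[ModelType, str], and api_key/url fall back to None when unset.
import os

from camel.configs import MistralConfig  # assumed import path
from camel.models import MistralModel  # assumed import path

model = MistralModel(
    model_type="mistral-large-latest",  # or a ModelType enum member
    model_config_dict=MistralConfig(temperature=0.2).as_dict(),  # if omitted, MistralConfig().as_dict() is used
    api_key=os.environ.get("MISTRAL_API_KEY"),
    timeout=60.0,
    max_retries=3,
)
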
def _to_openai_response(self, response: 'ChatCompletionResponse'):
def _to_mistral_chatmessage(self, messages: List[OpenAIMessage]):
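
# Illustrative only -- not the implementation of _to_mistral_chatmessage.
# OpenAI-style messages are dicts with "role"/"content" keys, and the Mistral
# SDK's chat API also accepts dict-shaped messages, so a minimal conversion
# can re-project just those fields (tool calls etc. would need extra handling):
def _to_mistral_chatmessage_sketch(messages):
    converted = []
    for msg in messages:
        converted.append({"role": msg["role"], "content": msg.get("content") or ""})
    return converted
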
def token_counter(self):
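
# Illustrative only -- not the implementation of token_counter. The class
# description says OpenAITokenCounter is the fallback, so a lazy accessor
# along these lines is plausible (the attribute name and the default counter
# model are assumptions):
def token_counter_sketch(self) -> BaseTokenCounter:
    if self._token_counter is None:
        self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
    return self._token_counter
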
def _run(
self,
messages: List[OpenAIMessage],
response_format: Optional[Type[BaseModel]] = None,
tools: Optional[List[Dict[str, Any]]] = None
):
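
# A hedged usage sketch of the _run signature above. Messages use the OpenAI
# dict format and tools use the OpenAI function-tool schema; the tool itself
# and the direct _run call are illustrative only (user code would normally go
# through the backend's public entry point), reusing `model` from the
# construction sketch further up.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is the capital of France?"},
]
tools = [
    {
        "type": "function",
        "function": {
            "name": "lookup_city",  # illustrative tool name
            "description": "Look up basic facts about a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }
]
response = model._run(messages, response_format=None, tools=tools)
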
def _prepare_request(
self,
messages: List[OpenAIMessage],
response_format: Optional[Type[BaseModel]] = None,
tools: Optional[List[Dict[str, Any]]] = None
):
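
# Illustrative only -- not the implementation of _prepare_request. Since the
# class description says model_config_dict is fed into Mistral.chat.complete(),
# preparation plausibly means layering per-call options on top of the stored
# config; the key names used here are assumptions:
def _prepare_request_sketch(self, messages, response_format=None, tools=None):
    request = dict(self.model_config_dict)  # copy the stored config
    request["messages"] = self._to_mistral_chatmessage(messages)
    if tools is not None:
        request["tools"] = tools
    if response_format is not None:
        request["response_format"] = response_format
    return request
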
def check_model_config(self):
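
# Illustrative only -- not the implementation of check_model_config. A typical
# check rejects config keys that the documented default config (MistralConfig)
# does not define; MISTRAL_API_PARAMS is an assumed name for that allow-list:
def check_model_config_sketch(self):
    for param in self.model_config_dict:
        if param not in MISTRAL_API_PARAMS:
            raise ValueError(
                f"Unexpected argument `{param}` passed to the Mistral model backend."
            )
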
def stream(self):
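
# Illustrative only -- not the implementation of stream. A common pattern is
# to report whether streaming was requested in the stored config; the "stream"
# key is an assumption:
def stream_sketch(self) -> bool:
    return bool(self.model_config_dict.get("stream", False))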