camel.data_collector package

On this page

camel.data_collector package#

Submodules#

camel.data_collector.alpaca_collector module#

class camel.data_collector.alpaca_collector.AlpacaDataCollector[source]#

Bases: BaseDataCollector

convert() Dict[str, Any][source]#

Convert the collected data into a dictionary.

llm_convert(converter: OpenAISchemaConverter | None = None, prompt: str | None = None) Dict[str, str][source]#

Convert collected data using an LLM schema converter.

Parameters:
  • converter (Optional[OpenAISchemaConverter], optional) – The converter to use. (default: OpenAISchemaConverter)

  • prompt (Optional[str], optional) – Prompt to guide the conversion. (default: DEFAULT_CONVERTER_PROMPTS)

Returns:

The converted data.

Return type:

Dict[str, str]

Raises:

ValueError – If no agent is injected or data cannot be collected.

record(agent: List[ChatAgent] | ChatAgent) Self[source]#

Inject an agent into the data collector.

Parameters:

agent (Union[List[ChatAgent], ChatAgent]) – The agent to inject.

camel.data_collector.base module#

class camel.data_collector.base.BaseDataCollector[source]#

Bases: ABC

Base class for data collectors.

abstract convert() Any[source]#

Convert the collected data.

get_agent_history(name: str) List[CollectorData][source]#

Get the message history of an agent.

Parameters:

name (str) – The name of the agent.

Returns:

The message history of the agent.

Return type:

List[CollectorData]

abstract llm_convert(converter: Any, prompt: str | None = None) Any[source]#

Convert the collected data.

record(agent: List[ChatAgent] | ChatAgent) Self[source]#

Record agents.

Parameters:

agent (Union[List[ChatAgent], ChatAgent]) – The agent(s) to inject.

property recording: bool#

Whether the collector is recording.

reset(reset_agents: bool = True)[source]#

Reset the collector.

Parameters:

reset_agents (bool, optional) – Whether to reset the agents. (default: True)

start() Self[source]#

Start recording.

step(role: Literal['user', 'assistant', 'system', 'tool'], name: str | None = None, message: str | None = None, function_call: Dict[str, Any] | None = None) Self[source]#

Record a message.

Parameters:
  • role (Literal["user", "assistant", "system", "tool"]) – The role of the message.

  • name (Optional[str], optional) – The name of the agent. (default: None)

  • message (Optional[str], optional) – The message to record. (default: None)

  • function_call (Optional[Dict[str, Any]], optional) – The function call to record. (default: None)

Returns:

The data collector.

Return type:

Self

stop() Self[source]#

Stop recording.

class camel.data_collector.base.CollectorData(id: UUID, name: str, role: Literal['user', 'assistant', 'system', 'tool'], message: str | None = None, function_call: Dict[str, Any] | None = None)[source]#

Bases: object

static from_context(name, context: Dict[str, Any]) CollectorData[source]#

Create a CollectorData record from a context.

Parameters:
  • name (str) – The name of the agent.

  • context (Dict[str, Any]) – The context.

Returns:

The collector data record.

Return type:

CollectorData

camel.data_collector.sharegpt_collector module#

class camel.data_collector.sharegpt_collector.ConversationItem(*, from_: Literal['human', 'gpt', 'function_call', 'observation'], value: str)[source]#

Bases: BaseModel

class Config[source]#

Bases: object

extra = 'forbid'#
fields: ClassVar[Dict[str, str]] = {'from_': 'from'}#
from_: Literal['human', 'gpt', 'function_call', 'observation']#
model_computed_fields: ClassVar[Dict[str, ComputedFieldInfo]] = {}#

A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_config: ClassVar[ConfigDict] = {'extra': 'forbid', 'fields': {'from_': 'from'}}#

Configuration for the model, should be a dictionary conforming to [ConfigDict][pydantic.config.ConfigDict].

model_fields: ClassVar[Dict[str, FieldInfo]] = {'from_': FieldInfo(annotation=Literal['human', 'gpt', 'function_call', 'observation'], required=True), 'value': FieldInfo(annotation=str, required=True)}#

Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo] objects.

This replaces Model.__fields__ from Pydantic V1.

value: str#
class camel.data_collector.sharegpt_collector.ShareGPTData(*, system: str, tools: str, conversations: List[ConversationItem])[source]#

Bases: BaseModel

class Config[source]#

Bases: object

extra = 'forbid'#
conversations: List[ConversationItem]#
model_computed_fields: ClassVar[Dict[str, ComputedFieldInfo]] = {}#

A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_config: ClassVar[ConfigDict] = {'extra': 'forbid'}#

Configuration for the model, should be a dictionary conforming to [ConfigDict][pydantic.config.ConfigDict].

model_fields: ClassVar[Dict[str, FieldInfo]] = {'conversations': FieldInfo(annotation=List[ConversationItem], required=True), 'system': FieldInfo(annotation=str, required=True), 'tools': FieldInfo(annotation=str, required=True)}#

Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo] objects.

This replaces Model.__fields__ from Pydantic V1.

system: str#
tools: str#
class camel.data_collector.sharegpt_collector.ShareGPTDataCollector[source]#

Bases: BaseDataCollector

convert() Dict[str, Any][source]#

Convert the collected data into a dictionary.

llm_convert(converter: OpenAISchemaConverter | None = None, prompt: str | None = None) Dict[str, Any][source]#

Convert collected data using an LLM schema converter.

Parameters:
  • converter (Optional[OpenAISchemaConverter], optional) – The converter to use. (default: OpenAISchemaConverter)

  • prompt (Optional[str], optional) – Prompt to guide the conversion. (default: DEFAULT_CONVERTER_PROMPTS)

Returns:

The converted data.

Return type:

Dict[str, Any]

Raises:

ValueError – If no agent is injected or data cannot be collected.

record(agent: List[ChatAgent] | ChatAgent) Self[source]#

Inject an agent into the data collector.

static to_sharegpt_conversation(data: Dict[str, Any]) ShareGPTConversation[source]#

Module contents#

class camel.data_collector.AlpacaDataCollector[source]#

Bases: BaseDataCollector

convert() Dict[str, Any][source]#

Convert the collected data into a dictionary.

llm_convert(converter: OpenAISchemaConverter | None = None, prompt: str | None = None) Dict[str, str][source]#

Convert collected data using an LLM schema converter.

Parameters:
  • converter (Optional[OpenAISchemaConverter], optional) – The converter to use. (default: OpenAISchemaConverter)

  • prompt (Optional[str], optional) – Prompt to guide the conversion. (default: DEFAULT_CONVERTER_PROMPTS)

Returns:

The converted data.

Return type:

Dict[str, str]

Raises:

ValueError – If no agent is injected or data cannot be collected.

record(agent: List[ChatAgent] | ChatAgent) Self[source]#

Inject an agent into the data collector.

Parameters:

agent (Union[List[ChatAgent], ChatAgent]) – The agent to inject.

class camel.data_collector.BaseDataCollector[source]#

Bases: ABC

Base class for data collectors.

abstract convert() Any[source]#

Convert the collected data.

get_agent_history(name: str) List[CollectorData][source]#

Get the message history of an agent.

Parameters:

name (str) – The name of the agent.

Returns:

The message history of the agent.

Return type:

List[CollectorData]

abstract llm_convert(converter: Any, prompt: str | None = None) Any[source]#

Convert the collected data.

record(agent: List[ChatAgent] | ChatAgent) Self[source]#

Record agents.

Parameters:

agent (Union[List[ChatAgent], ChatAgent]) – The agent(s) to inject.

property recording: bool#

Whether the collector is recording.

reset(reset_agents: bool = True)[source]#

Reset the collector.

Parameters:

reset_agents (bool, optional) – Whether to reset the agents. (default: True)

start() Self[source]#

Start recording.

step(role: Literal['user', 'assistant', 'system', 'tool'], name: str | None = None, message: str | None = None, function_call: Dict[str, Any] | None = None) Self[source]#

Record a message.

Parameters:
  • role (Literal["user", "assistant", "system", "tool"]) – The role of the message.

  • name (Optional[str], optional) – The name of the agent. (default: None)

  • message (Optional[str], optional) – The message to record. (default: None)

  • function_call (Optional[Dict[str, Any]], optional) – The function call to record. (default: None)

Returns:

The data collector.

Return type:

Self

stop() Self[source]#

Stop recording.

class camel.data_collector.ShareGPTDataCollector[source]#

Bases: BaseDataCollector

convert() Dict[str, Any][source]#

Convert the collected data into a dictionary.

llm_convert(converter: OpenAISchemaConverter | None = None, prompt: str | None = None) Dict[str, Any][source]#

Convert collected data using an LLM schema converter.

Parameters:
  • converter (Optional[OpenAISchemaConverter], optional) – The converter to use. (default: OpenAISchemaConverter)

  • prompt (Optional[str], optional) – Prompt to guide the conversion. (default: DEFAULT_CONVERTER_PROMPTS)

Returns:

The converted data.

Return type:

Dict[str, Any]

Raises:

ValueError – If no agent is injected or data cannot be collected.

record(agent: List[ChatAgent] | ChatAgent) Self[source]#

Inject an agent into the data collector.

static to_sharegpt_conversation(data: Dict[str, Any]) ShareGPTConversation[source]#