Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/bug_report.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ body:
attributes:
label: What version of camel are you using?
description: Run command `python3 -c 'print(__import__("camel").__version__)'` in your shell and paste the output here.
placeholder: E.g., 0.2.91a0
placeholder: E.g., 0.2.91a1
validations:
required: true

Expand Down
2 changes: 1 addition & 1 deletion camel/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from camel.logger import disable_logging, enable_logging, set_log_level

__version__ = '0.2.91a0'
__version__ = '0.2.91a1'

__all__ = [
'__version__',
Expand Down
3 changes: 3 additions & 0 deletions camel/configs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@
from .vllm_config import VLLM_API_PARAMS, VLLMConfig
from .volcano_config import VOLCANO_API_PARAMS, VolcanoConfig
from .watsonx_config import WATSONX_API_PARAMS, WatsonXConfig
from .xai_config import XAI_API_PARAMS, XAIConfig
from .yi_config import YI_API_PARAMS, YiConfig
from .zhipuai_config import ZHIPUAI_API_PARAMS, ZhipuAIConfig

Expand Down Expand Up @@ -151,4 +152,6 @@
'VOLCANO_API_PARAMS',
'AvianConfig',
'AVIAN_API_PARAMS',
'XAIConfig',
'XAI_API_PARAMS',
]
89 changes: 89 additions & 0 deletions camel/configs/xai_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# ========= Copyright 2023-2026 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2026 @ CAMEL-AI.org. All Rights Reserved. =========

from __future__ import annotations

from typing import Dict, Optional, Sequence, Type, Union

from pydantic import BaseModel

from camel.configs.base_config import BaseConfig


class XAIConfig(BaseConfig):
    r"""Defines the parameters for generating chat completions using the
    xAI native SDK (gRPC).

    Args:
        temperature (float, optional): Sampling temperature to use, between
            :obj:`0` and :obj:`2`. Higher values make the output more random,
            while lower values make it more focused and deterministic.
            (default: :obj:`None`)
        top_p (float, optional): An alternative to sampling with temperature,
            called nucleus sampling, where the model considers the results of
            the tokens with top_p probability mass. So :obj:`0.1` means only
            the tokens comprising the top 10% probability mass are considered.
            (default: :obj:`None`)
        max_tokens (int, optional): The maximum number of tokens to generate
            in the chat completion. (default: :obj:`None`)
        stop (str or sequence of str, optional): Up to :obj:`4` sequences
            where the API will stop generating further tokens.
            (default: :obj:`None`)
        stream (bool, optional): If True, partial message deltas will be sent
            as they become available. (default: :obj:`None`)
        response_format (object, optional): An object specifying the format
            that the model must output. Setting to a Pydantic BaseModel enables
            JSON schema mode for structured outputs.
            (default: :obj:`None`)
        tool_choice (Union[Dict[str, Union[str, Dict[str, str]]], str],
            optional): Controls which (if any) tool is called by the model.
            :obj:`"none"` means the model will not call any tool.
            :obj:`"auto"` means the model can pick between generating a
            message or calling one or more tools. :obj:`"required"` means
            the model must call one or more tools. A dict may be passed to
            force a specific tool. (default: :obj:`None`)
        reasoning_effort (str, optional): Controls the reasoning effort for
            reasoning models. Valid values: :obj:`"low"`, :obj:`"medium"`,
            :obj:`"high"`. (default: :obj:`None`)
        use_encrypted_content (bool, optional): If True, encrypted reasoning
            traces will be returned and preserved across conversation turns.
            This is required for multi-turn reasoning with thinking models.
            (default: :obj:`None`)
        store_messages (bool, optional): If True, request/response history is
            stored on xAI's servers for up to 30 days, enabling conversation
            chaining via previous_response_id. If False, history is managed
            locally. (default: :obj:`None`)
        frequency_penalty (float, optional): Penalizes new tokens based on
            their existing frequency in the text so far. Values between
            :obj:`-2.0` and :obj:`2.0`. (default: :obj:`None`)
        presence_penalty (float, optional): Penalizes new tokens based on
            whether they appear in the text so far. Values between
            :obj:`-2.0` and :obj:`2.0`. (default: :obj:`None`)
    """

    temperature: Optional[float] = None
    top_p: Optional[float] = None
    max_tokens: Optional[int] = None
    stop: Optional[Union[str, Sequence[str]]] = None
    stream: Optional[bool] = None
    response_format: Optional[Union[Type[BaseModel], dict]] = None
    tool_choice: Optional[
        Union[Dict[str, Union[str, Dict[str, str]]], str]
    ] = None
    reasoning_effort: Optional[str] = None
    use_encrypted_content: Optional[bool] = None
    store_messages: Optional[bool] = None
    frequency_penalty: Optional[float] = None
    presence_penalty: Optional[float] = None


# Names of all request parameters accepted by XAIConfig; iterating a
# pydantic model's `model_fields` mapping yields the field names directly,
# so a set comprehension over `.keys()` is redundant (ruff C401).
XAI_API_PARAMS = set(XAIConfig.model_fields)
10 changes: 6 additions & 4 deletions camel/embeddings/vlm_embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,11 +110,11 @@ def embed_list(
images=obj,
return_tensors="pt",
padding=True,
**image_processor_kwargs,
**(image_processor_kwargs or {}),
)
image_feature = (
self.model.get_image_features(
**image_input, **model_kwargs
**image_input, **(model_kwargs or {})
)
.squeeze(dim=0)
.tolist()
Expand All @@ -125,10 +125,12 @@ def embed_list(
text=obj,
return_tensors="pt",
padding=True,
**tokenizer_kwargs,
**(tokenizer_kwargs or {}),
)
text_feature = (
self.model.get_text_features(**text_input, **model_kwargs)
self.model.get_text_features(
**text_input, **(model_kwargs or {})
)
.squeeze(dim=0)
.tolist()
)
Expand Down
2 changes: 1 addition & 1 deletion camel/interpreters/internal_python_interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -405,7 +405,7 @@ def _execute_call(self, call: ast.Call) -> Any:
keyword.arg: self._execute_ast(keyword.value)
for keyword in call.keywords
}
return callable_func(*args, **kwargs)
return callable_func(*args, **kwargs) # type: ignore[arg-type]

def _execute_subscript(self, subscript: ast.Subscript):
index = self._execute_ast(subscript.slice)
Expand Down
2 changes: 2 additions & 0 deletions camel/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@
from .vllm_model import VLLMModel
from .volcano_model import VolcanoModel
from .watsonx_model import WatsonXModel
from .xai_model import XAIModel
from .yi_model import YiModel
from .zhipuai_model import ZhipuAIModel

Expand Down Expand Up @@ -120,4 +121,5 @@
'AihubMixModel',
'FunctionGemmaModel',
'AvianModel',
'XAIModel',
]
6 changes: 3 additions & 3 deletions camel/models/mistral_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,15 +202,15 @@ def _to_mistral_chatmessage(
mistral_tool_calls = []
for tool_call in tool_calls_list:
function_call = FunctionCall(
name=tool_call["function"].get("name"), # type: ignore[attr-defined]
arguments=tool_call["function"].get("arguments"), # type: ignore[attr-defined]
name=tool_call["function"].get("name"), # type: ignore[index]
arguments=tool_call["function"].get("arguments"), # type: ignore[index]
)
# Preserve the original tool call id to keep tool result
# ordering valid across turns.
mistral_tool_calls.append(
ToolCall(
function=function_call,
id=tool_call.get("id"), # type: ignore[attr-defined]
id=tool_call.get("id"), # type: ignore[union-attr]
)
)

Expand Down
2 changes: 2 additions & 0 deletions camel/models/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@
from camel.models.vllm_model import VLLMModel
from camel.models.volcano_model import VolcanoModel
from camel.models.watsonx_model import WatsonXModel
from camel.models.xai_model import XAIModel
from camel.models.yi_model import YiModel
from camel.models.zhipuai_model import ZhipuAIModel
from camel.types import ModelPlatformType, ModelType, UnifiedModelType
Expand Down Expand Up @@ -121,6 +122,7 @@ class ModelFactory:
ModelPlatformType.AIHUBMIX: AihubMixModel,
ModelPlatformType.AVIAN: AvianModel,
ModelPlatformType.FUNCTION_GEMMA: FunctionGemmaModel,
ModelPlatformType.XAI: XAIModel,
}

@staticmethod
Expand Down
Loading
Loading