|
|
|
@ -51,12 +51,12 @@ CHAT_MODEL_FEAT_TABLE_CORRECTION = {
|
|
|
|
|
"package": "langchain-google-vertexai",
|
|
|
|
|
},
|
|
|
|
|
"ChatGroq": {
|
|
|
|
|
"tool_calling": "partial",
|
|
|
|
|
"tool_calling": True,
|
|
|
|
|
"structured_output": True,
|
|
|
|
|
"package": "langchain-groq",
|
|
|
|
|
},
|
|
|
|
|
"ChatCohere": {
|
|
|
|
|
"tool_calling": "partial",
|
|
|
|
|
"tool_calling": True,
|
|
|
|
|
"structured_output": True,
|
|
|
|
|
"package": "langchain-cohere",
|
|
|
|
|
},
|
|
|
|
@ -99,7 +99,6 @@ All ChatModels implement the Runnable interface, which comes with default implem
|
|
|
|
|
|
|
|
|
|
Each ChatModel integration can optionally provide native implementations to truly enable async or streaming.
|
|
|
|
|
The table shows, for each integration, which features have been implemented with native support.
|
|
|
|
|
Yellow circles (🟡) indicate partial support - for example, if the model supports tool calling but not tool messages for agents.
|
|
|
|
|
|
|
|
|
|
{table}
|
|
|
|
|
|
|
|
|
|