|
327 | 327 | "response = await model_client.create([UserMessage(content=\"What is the capital of France?\", source=\"user\")])\n",
|
328 | 328 | "print(response)"
|
329 | 329 | ]
|
| 330 | + }, |
| 331 | + { |
| 332 | + "cell_type": "markdown", |
| 333 | + "metadata": {}, |
| 334 | + "source": [ |
| 335 | + "## Semantic Kernel Adapter\n", |
| 336 | + "\n", |
| 337 | + "The {py:class}`~autogen_ext.models.semantic_kernel.SKChatCompletionAdapter`\n", |
| 338 | + "allows you to use Semantic Kernel model clients as a\n", |
| 339 | + "{py:class}`~autogen_core.models.ChatCompletionClient` by adapting them to the required interface.\n", |
| 340 | + "\n", |
| 341 | + "You need to install the relevant provider extras to use this adapter. \n", |
| 342 | + "\n", |
| 343 | + "The list of extras that can be installed:\n", |
| 344 | + "\n", |
| 345 | + "- `semantic-kernel-anthropic`: Install this extra to use Anthropic models.\n", |
| 346 | + "- `semantic-kernel-google`: Install this extra to use Google Gemini models.\n", |
| 347 | + "- `semantic-kernel-ollama`: Install this extra to use Ollama models.\n", |
| 348 | + "- `semantic-kernel-mistralai`: Install this extra to use MistralAI models.\n", |
| 349 | + "- `semantic-kernel-aws`: Install this extra to use AWS models.\n", |
| 350 | + "- `semantic-kernel-hugging-face`: Install this extra to use Hugging Face models.\n", |
| 351 | + "\n", |
| 352 | + "For example, to use Anthropic models, you need to install `semantic-kernel-anthropic`." |
| 353 | + ] |
| 354 | + }, |
| 355 | + { |
| 356 | + "cell_type": "code", |
| 357 | + "execution_count": null, |
| 358 | + "metadata": { |
| 359 | + "vscode": { |
| 360 | + "languageId": "shellscript" |
| 361 | + } |
| 362 | + }, |
| 363 | + "outputs": [], |
| 364 | + "source": [ |
| 365 | + "# pip install \"autogen-ext[semantic-kernel-anthropic]\"" |
| 366 | + ] |
| 367 | + }, |
| 368 | + { |
| 369 | + "cell_type": "markdown", |
| 370 | + "metadata": {}, |
| 371 | + "source": [ |
| 372 | + "To use this adapter, you need to create a Semantic Kernel model client and pass it to the adapter.\n", |
| 373 | + "\n", |
| 374 | + "For example, to use the Anthropic model:" |
| 375 | + ] |
| 376 | + }, |
| 377 | + { |
| 378 | + "cell_type": "code", |
| 379 | + "execution_count": 1, |
| 380 | + "metadata": {}, |
| 381 | + "outputs": [ |
| 382 | + { |
| 383 | + "name": "stdout", |
| 384 | + "output_type": "stream", |
| 385 | + "text": [ |
| 386 | + "finish_reason='stop' content='The capital of France is Paris. It is also the largest city in France and one of the most populous metropolitan areas in Europe.' usage=RequestUsage(prompt_tokens=0, completion_tokens=0) cached=False logprobs=None\n" |
| 387 | + ] |
| 388 | + } |
| 389 | + ], |
| 390 | + "source": [ |
| 391 | + "import os\n", |
| 392 | + "\n", |
| 393 | + "from autogen_core.models import UserMessage\n", |
| 394 | + "from autogen_ext.models.semantic_kernel import SKChatCompletionAdapter\n", |
| 395 | + "from semantic_kernel import Kernel\n", |
| 396 | + "from semantic_kernel.connectors.ai.anthropic import AnthropicChatCompletion, AnthropicChatPromptExecutionSettings\n", |
| 397 | + "from semantic_kernel.memory.null_memory import NullMemory\n", |
| 398 | + "\n", |
| 399 | + "sk_client = AnthropicChatCompletion(\n", |
| 400 | + " ai_model_id=\"claude-3-5-sonnet-20241022\",\n", |
| 401 | + " api_key=os.environ[\"ANTHROPIC_API_KEY\"],\n", |
| 402 | + " service_id=\"my-service-id\", # Optional; for targeting specific services within Semantic Kernel\n", |
| 403 | + ")\n", |
| 404 | + "settings = AnthropicChatPromptExecutionSettings(\n", |
| 405 | + " temperature=0.2,\n", |
| 406 | + ")\n", |
| 407 | + "\n", |
| 408 | + "anthropic_model_client = SKChatCompletionAdapter(\n", |
| 409 | + " sk_client, kernel=Kernel(memory=NullMemory()), prompt_settings=settings\n", |
| 410 | + ")\n", |
| 411 | + "\n", |
| 412 | + "# Call the model directly.\n", |
| 413 | + "model_result = await anthropic_model_client.create(\n", |
| 414 | + " messages=[UserMessage(content=\"What is the capital of France?\", source=\"User\")]\n", |
| 415 | + ")\n", |
| 416 | + "print(model_result)" |
| 417 | + ] |
| 418 | + }, |
| 419 | + { |
| 420 | + "cell_type": "markdown", |
| 421 | + "metadata": {}, |
| 422 | + "source": [ |
| 423 | + "Read more about the [Semantic Kernel Adapter](../../../reference/python/autogen_ext.models.semantic_kernel.rst)." |
| 424 | + ] |
330 | 425 | }
|
331 | 426 | ],
|
332 | 427 | "metadata": {
|
|
0 commit comments