From 48cf96a66b1fcada81a4aba45db49ccbd235998a Mon Sep 17 00:00:00 2001
From: Filinto Duran <1373693+filintod@users.noreply.github.com>
Date: Wed, 6 Aug 2025 10:42:13 -0500
Subject: [PATCH 1/3] add alpha2 examples to conversation sdk
Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com>
---
conversation/components/ollama.yaml | 14 +
conversation/components/openai.yaml | 12 +
conversation/python/http/README.md | 54 ++++
conversation/python/sdk/README.md | 262 +++++++++++-------
conversation/python/sdk/conversation/app.py | 22 +-
.../python/sdk/conversation/requirements.txt | 4 +-
.../python/sdk/conversation/tool_calling.py | 52 ++++
.../tool_calling_from_function.py | 72 +++++
8 files changed, 384 insertions(+), 108 deletions(-)
create mode 100644 conversation/components/ollama.yaml
create mode 100644 conversation/components/openai.yaml
create mode 100644 conversation/python/sdk/conversation/tool_calling.py
create mode 100644 conversation/python/sdk/conversation/tool_calling_from_function.py
diff --git a/conversation/components/ollama.yaml b/conversation/components/ollama.yaml
new file mode 100644
index 000000000..ac8122076
--- /dev/null
+++ b/conversation/components/ollama.yaml
@@ -0,0 +1,14 @@
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: ollama
+spec:
+ type: conversation.openai
+ version: v1
+ metadata:
+ - name: key
+ value: 'ollama'
+ - name: model
+ value: gpt-oss:20b
+ - name: endpoint
+ value: 'http://localhost:11434/v1' # ollama endpoint https://ollama.com/blog/openai-compatibility
diff --git a/conversation/components/openai.yaml b/conversation/components/openai.yaml
new file mode 100644
index 000000000..29729c641
--- /dev/null
+++ b/conversation/components/openai.yaml
@@ -0,0 +1,12 @@
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: openai
+spec:
+ type: conversation.openai
+ version: v1
+ metadata:
+ - name: key
+ value: "YOUR_OPENAI_API_KEY"
+ - name: model
+ value: gpt-4o-mini-2024-07-18
diff --git a/conversation/python/http/README.md b/conversation/python/http/README.md
index 2b2f7db4d..986e6578b 100644
--- a/conversation/python/http/README.md
+++ b/conversation/python/http/README.md
@@ -24,7 +24,34 @@ name: Install Python dependencies
```bash
cd ./conversation
+```
+
+
+Option 1: Using venv (Python's built-in virtual environment)
+
+```bash
+python3 -m venv .venv
+source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
pip3 install -r requirements.txt
+```
+
+
+
+
+Option 2: Using uv (faster alternative to pip)
+
+```bash
+python3 -m venv .venv
+source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+# If you don't have uv installed yet, install it first:
+# pip install uv
+uv pip install -r requirements.txt
+```
+
+
+
+```bash
+# Return to the parent directory
cd ..
```
@@ -82,12 +109,39 @@ Open a terminal and run:
```bash
cd ./conversation
+```
+
+
+Option 1: Using venv (Python's built-in virtual environment)
+
+```bash
+python3 -m venv .venv
+source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
pip3 install -r requirements.txt
```
+
+
+
+Option 2: Using uv (faster alternative to pip)
+
+```bash
+python3 -m venv .venv
+source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+# If you don't have uv installed yet, install it first:
+# pip install uv
+uv pip install -r requirements.txt
+```
+
+
+
2. Run the application:
```bash
+# Make sure your virtual environment is activated
+# If not already activated, run:
+# source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+
dapr run --app-id conversation --resources-path ../../../components -- python3 app.py
```
diff --git a/conversation/python/sdk/README.md b/conversation/python/sdk/README.md
index a9836c3a7..72966b565 100644
--- a/conversation/python/sdk/README.md
+++ b/conversation/python/sdk/README.md
@@ -1,97 +1,171 @@
# Dapr Conversation API (Python SDK)
-In this quickstart, you'll send an input to a mock Large Language Model (LLM) using Dapr's Conversation API. This API is responsible for providing one consistent API entry point to talk to underlying LLM providers.
+This quickstart demonstrates how to interact with Large Language Models (LLMs) using Dapr's Conversation API. The Conversation API provides a unified interface for communicating with various LLM providers through a consistent entry point.
-Visit [this](https://docs.dapr.io/developing-applications/building-blocks/conversation/conversation-overview/) link for more information about Dapr and the Conversation API.
-
-This quickstart includes one app:
-
-- `app.py`, responsible for sending an input to the underlying LLM and retrieving an output.
-
-## Run the app with the template file
-
-This section shows how to run the application using the [multi-app run template files](https://docs.dapr.io/developing-applications/local-development/multi-app-dapr-run/multi-app-overview/) with `dapr run -f .`.
-
-This example uses the default LLM Component provided by Dapr which simply echoes the input provided, for testing purposes. Here are other [supported Conversation components](https://docs.dapr.io/reference/components-reference/supported-conversation/).
-
-1. Install dependencies:
-
-
-
-```bash
-cd ./conversation
-pip3 install -r requirements.txt
-cd ..
-```
-
-
-
-2. Open a new terminal window and run the multi app run template:
-
-
-
-```bash
-dapr run -f .
-```
-
-The terminal console output should look similar to this, where:
-
-- The app sends an input `What is dapr?` to the `echo` Component mock LLM.
-- The mock LLM echoes `What is dapr?`.
-
-```text
-== APP - conversation == Input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
-```
-
-
-
-3. Stop and clean up application processes.
-
-
-
-```bash
-dapr stop -f .
-```
-
-
-
-## Run the app with the Dapr CLI
-
-1. Install dependencies:
-
-Open a terminal and run:
-
-```bash
-cd ./conversation
-pip3 install -r requirements.txt
-```
-
-2. Run the application:
-
-```bash
-dapr run --app-id conversation --resources-path ../../../components -- python3 app.py
-```
-
-You should see the output:
-
-```bash
-== APP == Input sent: What is dapr?
-== APP == Output response: What is dapr?
-```
+For comprehensive documentation on Dapr's Conversation API, see the [official documentation](https://docs.dapr.io/developing-applications/building-blocks/conversation/conversation-overview/).
+
+## Sample Applications
+
+This quickstart includes three example applications:
+
+- `app.py`: Basic example that sends a prompt to an LLM and retrieves the response
+- `tool_calling.py`: Advanced example that defines a tool and sends a request to an LLM that supports tool calling
+- `tool_calling_from_function.py`: Similar to `tool_calling.py` but uses a helper function to generate the JSON schema for function calling
+
+## LLM Providers
+
+By default, this quickstart uses Dapr's mock LLM Echo Component, which simply echoes back the input for testing purposes.
+
+The repository also includes pre-configured components for the following LLM providers:
+- [OpenAI](../../components/openai.yaml)
+- [Ollama](../../components/ollama.yaml) (via its OpenAI compatibility layer)
+
+To use one of these alternative providers, modify the `provider_component` value in your application code from `echo` to either `openai` or `ollama`.
+
+You can also experiment with adding components for other LLM providers supported by Dapr.
+
+### OpenAI Configuration
+
+To use the OpenAI provider:
+
+1. Change the `provider_component` parameter in your application code to `openai`
+2. Edit the [openai.yaml](../../components/openai.yaml) component file and replace `YOUR_OPENAI_API_KEY` with your actual OpenAI API key
+
+### Ollama Configuration
+
+To use the Ollama provider:
+
+1. Change the `provider_component` parameter in your application code to `ollama`
+2. Install and run Ollama locally on your machine
+3. Pull a model with tool-calling support from the [Ollama models repository](https://ollama.com/search?c=tools)
+
+The default configuration uses the `gpt-oss:20b` model, but you can modify the component file to use any compatible model that your system can run.
+
+## Running the Application
+
+You can run the sample applications using either the Dapr multi-app template or the Dapr CLI directly.
+
+### Option 1: Using the Multi-App Template
+
+This approach uses [Dapr's multi-app run template files](https://docs.dapr.io/developing-applications/local-development/multi-app-dapr-run/multi-app-overview/) to simplify deployment with `dapr run -f .`.
+
+For more LLM options, see the [supported Conversation components](https://docs.dapr.io/reference/components-reference/supported-conversation/) documentation.
+
+1. **Install dependencies:**
+
+
+
+ ```bash
+ cd ./conversation
+ ```
+
+
+ Option 1: Using pip
+
+ ```bash
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+ pip3 install -r requirements.txt
+ ```
+
+
+
+
+ Option 2: Using uv (faster alternative to pip)
+
+ ```bash
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+ # If you do not have uv installed yet, install it first:
+ # pip install uv
+ uv pip install -r requirements.txt
+ ```
+
+
+
+ ```bash
+ # Return to the parent directory
+ cd ..
+ ```
+
+
+2. **Run the application:**
+
+
+
+ ```bash
+ dapr run -f .
+ ```
+
+ Expected output:
+
+ ```text
+ == APP - conversation == Input sent: What is dapr?
+ == APP - conversation == Output response: What is dapr?
+ ```
+
+
+
+3. **Stop the application:**
+
+
+
+ ```bash
+ dapr stop -f .
+ ```
+
+
+
+### Option 2: Using the Dapr CLI Directly
+
+As an alternative to the multi-app template, you can run the application directly with the Dapr CLI.
+
+1. **Install dependencies:**
+
+ ```bash
+ cd ./conversation
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+ pip3 install -r requirements.txt
+ ```
+
+2. **Run the application:**
+
+ ```bash
+ dapr run --app-id conversation --resources-path ../../../components -- python3 app.py
+ ```
+
+ Expected output:
+
+ ```text
+ == APP == Input sent: What is dapr?
+ == APP == Output response: What is dapr?
+ ```
+
+3. **Try the tool calling examples:**
+
+ You can run the other example applications similarly:
+
+ ```bash
+ # For tool calling example
+ dapr run --app-id conversation --resources-path ../../../components -- python3 tool_calling.py
+
+ # For tool calling with function helper example
+ dapr run --app-id conversation --resources-path ../../../components -- python3 tool_calling_from_function.py
+ ```
diff --git a/conversation/python/sdk/conversation/app.py b/conversation/python/sdk/conversation/app.py
index 3e1fac0ce..4c3b7d728 100644
--- a/conversation/python/sdk/conversation/app.py
+++ b/conversation/python/sdk/conversation/app.py
@@ -11,24 +11,20 @@
# limitations under the License.
# ------------------------------------------------------------
from dapr.clients import DaprClient
-from dapr.clients.grpc._request import ConversationInput
+from dapr.clients.grpc._request import ConversationInputAlpha2, ConversationMessage, ConversationMessageContent, ConversationMessageOfUser
with DaprClient() as d:
+ text_input = "What is dapr?"
+ provider_component = "echo"
+
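+    # Alpha2 wraps each conversation turn in role-specific message objects (of_user here);
+    # scrub_pii asks the runtime to mask any detected PII before the request reaches the LLM.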
inputs = [
- ConversationInput(content="What is dapr?", role='user', scrub_pii=True),
+ ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=text_input)]))],
+ scrub_pii=True),
]
- metadata = {
- 'model': 'modelname',
- 'key': 'authKey',
- 'cacheTTL': '10m',
- }
-
- print('Input sent: What is dapr?')
+ print(f'Input sent: {text_input}')
- response = d.converse_alpha1(
- name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata
- )
+ response = d.converse_alpha2(name=provider_component, inputs=inputs, temperature=0.7, context_id='chat-123')
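+
+    # Alpha2 responses contain a list of outputs, each with one or more choices;
+    # choices[0].message.content holds the assistant's reply text.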
for output in response.outputs:
- print(f'Output response: {output.result}')
+ print(f'Output response: {output.choices[0].message.content}')
diff --git a/conversation/python/sdk/conversation/requirements.txt b/conversation/python/sdk/conversation/requirements.txt
index 920f12393..c511d4017 100644
--- a/conversation/python/sdk/conversation/requirements.txt
+++ b/conversation/python/sdk/conversation/requirements.txt
@@ -1 +1,3 @@
-dapr>=1.15.0
\ No newline at end of file
+#dapr>=1.15.0
+
+-e ../../../../../python-sdk
\ No newline at end of file
diff --git a/conversation/python/sdk/conversation/tool_calling.py b/conversation/python/sdk/conversation/tool_calling.py
new file mode 100644
index 000000000..8afccd623
--- /dev/null
+++ b/conversation/python/sdk/conversation/tool_calling.py
@@ -0,0 +1,52 @@
+# ------------------------------------------------------------
+# Copyright 2025 The Dapr Authors
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------
+from dapr.clients import DaprClient
+from dapr.clients.grpc._request import (ConversationInputAlpha2, ConversationMessage, ConversationMessageContent,
+ ConversationMessageOfUser, ConversationToolsFunction, ConversationTools)
+
+with DaprClient() as d:
+ provider_component = "echo"
+
+ function = ConversationToolsFunction(
+ name="calculate",
+ description="Perform calculations",
+ parameters={
+ "type": "object",
+ "properties": {
+ "expression": {
+ "type": "string",
+ "description": "Math expression"
+ }
+ },
+ "required": ["expression"]
+ }
+ )
+ calc_tool = ConversationTools(function=function)
+
+ textInput = "calculate square root of 15"
+ inputs = [
+ ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=textInput)]))],
+ scrub_pii=True),
+ ]
+
+ print(f'Input sent: {textInput}')
+
+ response = d.converse_alpha2(
+ name=provider_component,
+ inputs=inputs,
+ temperature=0.7,
+ tools=[calc_tool],
+ )
+
+ for output in response.outputs:
+ print(f'Output response: {output.choices[0]}')
diff --git a/conversation/python/sdk/conversation/tool_calling_from_function.py b/conversation/python/sdk/conversation/tool_calling_from_function.py
new file mode 100644
index 000000000..7b9703176
--- /dev/null
+++ b/conversation/python/sdk/conversation/tool_calling_from_function.py
@@ -0,0 +1,72 @@
+# ------------------------------------------------------------
+# Copyright 2025 The Dapr Authors
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------
+from dapr.clients import DaprClient
+from dapr.clients.grpc._request import (ConversationInputAlpha2, ConversationMessage, ConversationMessageContent,
+ ConversationMessageOfUser, ConversationToolsFunction, ConversationTools)
+
+# The automated function-to-schema converter requires typed arguments and a docstring
+# with a brief description of each argument.
+def calculate(expression: str) -> str:
+ """Perform calculations.
+
+ It allows for the following calculations:
+ - Square root of a number.
+ - Addition, subtraction, multiplication and division.
+
+ It CANNOT do trigonometry calculations.
+
+ Args:
+ expression (str): Math expression.
+ """
+ return expression
+
+def execute_converse_alpha2(text_input: str, tool: ConversationTools) -> None:
+
+ # disable scrubbing of PII as some numbers can be matched as phone numbers
+ inputs = [
+ ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=text_input)]))],
+ scrub_pii=False),
+ ]
+
+ print(f'Input sent: {text_input}')
+
+ response = d.converse_alpha2(
+ name=provider_component,
+ inputs=inputs,
+ temperature=0.7,
+ tools=[tool],
+ )
+
+ for output in response.outputs:
+ print(f'Output response: {output.choices[0]}')
+
+with DaprClient() as d:
+ provider_component = "echo" # Change to your provider component name
+
+ # use the function-to-schema converter to generate the ConversationToolsFunction automatically, including the json-schema parameters section
+ func = ConversationToolsFunction.from_function(calculate)
+ calc_tool = ConversationTools(function=func)
+
+ print(f'Function schema generated: \n Name: {func.name}\n Description: {func.description}\n Parameters (json-schema): {func.parameters}\n')
+
+ print('\n----------\n')
+
+ print('First call to calculator should trigger a tool call on both real and echo providers:')
+
+ execute_converse_alpha2("calculate square root of 15", calc_tool)
+
+ print('\n----------\n')
+ print('Second call should not trigger a tool call on a real provider, but the echo provider will still echo it back as a tool call:')
+
+ execute_converse_alpha2("calculate the sine of 195.33", calc_tool)
+
From 7cceaba3d2cab87fe3593044aaf693c69322c95e Mon Sep 17 00:00:00 2001
From: Filinto Duran <1373693+filintod@users.noreply.github.com>
Date: Sun, 17 Aug 2025 19:46:57 -0500
Subject: [PATCH 2/3] remove real llm provider info
Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com>
---
conversation/python/http/README.md | 307 +++++++++---------
conversation/python/sdk/README.md | 117 ++++---
conversation/python/sdk/conversation/app.py | 2 +-
.../python/sdk/conversation/tool_calling.py | 46 ++-
.../tool_calling_from_function.py | 72 ----
.../python/sdk/dapr-tool-calling.yaml | 7 +
6 files changed, 275 insertions(+), 276 deletions(-)
delete mode 100644 conversation/python/sdk/conversation/tool_calling_from_function.py
create mode 100644 conversation/python/sdk/dapr-tool-calling.yaml
diff --git a/conversation/python/http/README.md b/conversation/python/http/README.md
index d7bf31576..54a747370 100644
--- a/conversation/python/http/README.md
+++ b/conversation/python/http/README.md
@@ -16,162 +16,173 @@ This section shows how to run the application using the [multi-app run template
This example uses the default LLM Component provided by Dapr which simply echoes the input provided, for testing purposes. Here are other [supported Conversation components](https://docs.dapr.io/reference/components-reference/supported-conversation/).
-1. Install dependencies:
-
-
-
-```bash
-cd ./conversation
-```
-
-
-Option 1: Using venv (Python's built-in virtual environment)
-
-```bash
-python3 -m venv .venv
-source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
-pip3 install -r requirements.txt
-```
-
-
-
-
-Option 2: Using uv (faster alternative to pip)
-
-```bash
-python3 -m venv .venv
-source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
-# If you don't have uv installed yet, install it first:
-# pip install uv
-uv pip install -r requirements.txt
-```
-
-
-
-```bash
-# Return to the parent directory
-cd ..
-```
-
-
+1. Install dependencies:
+
+
+ Option 1: Using uv (faster modern alternative to pip)
+
+ ```bash
+ cd conversation
+
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+
+ # If you don't have uv installed yet, install it first:
+ # pip install uv
+ uv pip install -r requirements.txt
+ ```
+
+
+
+
+ Option 2: Using classic pip
+
+
+
+ ```bash
+ cd conversation
+
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+
+ pip install -r requirements.txt
+ ```
+
+
+
+
+
+ ```bash
+ # Return to the parent directory
+ cd ..
+ ```
2. Open a new terminal window and run the multi app run template:
-
-
-```bash
-dapr run -f .
-```
-
-The terminal console output should look similar to this, where:
-
-- The app first sends an input `What is dapr?` to the `echo` Component mock LLM.
-- The mock LLM echoes `What is dapr?`.
-- The app then sends a weather request to the component with tools available to the LLM.
-- The LLM will either respond back with a tool call for the user, or an ask for more information.
-
-```text
-== APP - conversation == Input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
-```
-
-- The app then sends an input `What is the weather like in San Francisco in celsius?` to the `echo` Component mock LLM.
-- The mock LLM echoes `What is the weather like in San Francisco in celsius?` and calls the `get_weather` tool.
-- Since we are using the `echo` Component mock LLM, the tool call is not executed and the LLM returns `No tool calls in response`.
-
-```text
-== APP == Tool calling input sent: What is the weather like in San Francisco in celsius?
-== APP == Output message: What is the weather like in San Francisco in celsius?
-== APP == No tool calls in response
-```
-```
-
-
-
-2. Stop and clean up application processes.
-
-
-
-```bash
-dapr stop -f .
-```
-
-
+
+
+ ```bash
+ source conversation/.venv/bin/activate
+ dapr run -f .
+ ```
+
+ The terminal console output should look similar to this, where:
+
+ - The app first sends an input `What is dapr?` to the `echo` Component mock LLM.
+ - The mock LLM echoes `What is dapr?`.
+ - The app then sends a weather request to the component with tools available to the LLM.
+ - The LLM will either respond with a tool call for the app to act on, or ask for more information.
+
+ ```text
+ == APP - conversation == Input sent: What is dapr?
+ == APP - conversation == Output response: What is dapr?
+ ```
+
+ - The app then sends an input `What is the weather like in San Francisco in celsius?` to the `echo` Component mock LLM.
+ - The mock LLM echoes `What is the weather like in San Francisco in celsius?` and returns a `get_weather` tool call.
+ - Since we are using the `echo` Component mock LLM, the tool call is echoed back rather than executed; the app detects it and prints its details.
+
+ ```text
+ == APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius?
+ == APP - conversation == Output message: What is the weather like in San Francisco in celsius?
+ == APP - conversation == Tool calls detected:
+ == APP - conversation == Tool call: {'id': '0', 'function': {'name': 'get_weather', 'arguments': 'location,unit'}}
+ == APP - conversation == Function name: get_weather
+ == APP - conversation == Function arguments: location,unit
+ ```
+
+
+
+3. Stop and clean up application processes.
+
+
+
+ ```bash
+ dapr stop -f .
+ ```
+
+
## Run the app with the Dapr CLI
1. Install dependencies:
-Open a terminal and run:
-
-```bash
-cd ./conversation
-```
-
-
-Option 1: Using venv (Python's built-in virtual environment)
-
-```bash
-python3 -m venv .venv
-source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
-pip3 install -r requirements.txt
-```
-
-
-
-
-Option 2: Using uv (faster alternative to pip)
-
-```bash
-python3 -m venv .venv
-source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
-# If you don't have uv installed yet, install it first:
-# pip install uv
-uv pip install -r requirements.txt
-```
-
-
+ Open a terminal and run:
+
+ ```bash
+ cd ./conversation
+ ```
+
+
+ Option 1: Using uv (faster alternative to pip)
+
+ ```bash
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+ # If you don't have uv installed yet, install it first:
+ # pip install uv
+ uv pip install -r requirements.txt
+ ```
+
+
+
+
+ Option 2: Using classic pip
+
+ ```bash
+ python3 -m venv .venv
+ source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+ pip3 install -r requirements.txt
+ ```
+
+
2. Run the application:
-```bash
-# Make sure your virtual environment is activated
-# If not already activated, run:
-# source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
-
-dapr run --app-id conversation --resources-path ../../../components -- python3 app.py
-```
-
-The terminal console output should look similar to this, where:
-
-- The app first sends an input `What is dapr?` to the `echo` Component mock LLM.
-- The mock LLM echoes `What is dapr?`.
-- The app then sends an input `What is the weather like in San Francisco in celsius?` to the `echo` Component mock LLM.
-- The mock LLM echoes `What is the weather like in San Francisco in celsius?`
-
-```text
-== APP - conversation == Conversation input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
-== APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius?
-== APP - conversation == Output message: What is the weather like in San Francisco in celsius?
-== APP - conversation == No tool calls in response
-```
\ No newline at end of file
+ ```bash
+ # Make sure your virtual environment is activated
+ # If not already activated, run:
+ # source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
+
+ dapr run --app-id conversation --resources-path ../../../components -- python3 app.py
+ ```
+
+ The terminal console output should look similar to this, where:
+
+ - The app first sends an input `What is dapr?` to the `echo` Component mock LLM.
+ - The mock LLM echoes `What is dapr?`.
+ - The app then sends an input `What is the weather like in San Francisco in celsius?` to the `echo` Component mock LLM.
+ - The mock LLM echoes `What is the weather like in San Francisco in celsius?` and returns a `get_weather` tool call, which the app prints.
+
+ ```text
+ == APP - conversation == Conversation input sent: What is dapr?
+ == APP - conversation == Output response: What is dapr?
+ == APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius?
+ == APP - conversation == Output message: What is the weather like in San Francisco in celsius?
+ == APP - conversation == Tool calls detected:
+ == APP - conversation == Tool call: {'id': '0', 'function': {'name': 'get_weather', 'arguments': 'location,unit'}}
+ == APP - conversation == Function name: get_weather
+ == APP - conversation == Function arguments: location,unit
+ ```
\ No newline at end of file
diff --git a/conversation/python/sdk/README.md b/conversation/python/sdk/README.md
index 72966b565..3cc488c4f 100644
--- a/conversation/python/sdk/README.md
+++ b/conversation/python/sdk/README.md
@@ -6,40 +6,12 @@ For comprehensive documentation on Dapr's Conversation API, see the [official do
## Sample Applications
-This quickstart includes three example applications:
+This quickstart includes two example applications:
- `app.py`: Basic example that sends a prompt to an LLM and retrieves the response
-- `tool_calling.py`: Advanced example that defines a tool and sends a request to an LLM that supports tool calling
-- `tool_calling_from_function.py`: Similar to `tool_calling.py` but uses a helper function to generate the JSON schema for function calling
-
-## LLM Providers
-
-By default, this quickstart uses Dapr's mock LLM Echo Component, which simply echoes back the input for testing purposes.
-
-The repository also includes pre-configured components for the following LLM providers:
-- [OpenAI](../../components/openai.yaml)
-- [Ollama](../../components/ollama.yaml) (via its OpenAI compatibility layer)
-
-To use one of these alternative providers, modify the `provider_component` value in your application code from `echo` to either `openai` or `ollama`.
-
-You can also experiment with adding components for other LLM providers supported by Dapr.
-
-### OpenAI Configuration
-
-To use the OpenAI provider:
-
-1. Change the `provider_component` parameter in your application code to `openai`
-2. Edit the [openai.yaml](../../components/openai.yaml) component file and replace `YOUR_OPENAI_API_KEY` with your actual OpenAI API key
-
-### Ollama Configuration
-
-To use the Ollama provider:
-
-1. Change the `provider_component` parameter in your application code to `ollama`
-2. Install and run Ollama locally on your machine
-3. Pull a model with tool-calling support from the [Ollama models repository](https://ollama.com/search?c=tools)
-
-The default configuration uses the `gpt-oss:20b` model, but you can modify the component file to use any compatible model that your system can run.
+- `tool_calling.py`: An example that demonstrates how to use the Conversation API to perform external tool calling with two approaches:
+ - Creating the tool definition json schema manually
+ - Using the `@tool` decorator to automatically generate the schema (see the sketch below)
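+
+For example, the decorator-based approach looks roughly like this (a minimal sketch of what `tool_calling.py` does; `conversation.tool` registers the function and derives a json-schema from its signature and docstring):
+
+```python
+from dapr.clients.grpc import conversation
+
+@conversation.tool
+def get_weather(location: str, unit: str) -> str:
+    """get weather from a location in the given unit"""
+    return f"The weather in {location} is 25 degrees {unit}."
+```
+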
## Running the Application
@@ -52,35 +24,36 @@ This approach uses [Dapr's multi-app run template files](https://docs.dapr.io/de
For more LLM options, see the [supported Conversation components](https://docs.dapr.io/reference/components-reference/supported-conversation/) documentation.
1. **Install dependencies:**
-
-
```bash
cd ./conversation
```
- Option 1: Using pip
-
+ Option 1: Using uv (faster modern alternative to pip)
+
```bash
python3 -m venv .venv
source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
- pip3 install -r requirements.txt
+ # If you do not have uv installed yet, install it first:
+ # pip install uv
+ uv pip install -r requirements.txt
```
-
+
-
+
- Option 2: Using uv (faster alternative to pip)
+ Option 2: Using pip
+
+
```bash
python3 -m venv .venv
source .venv/bin/activate # On Windows, use: .venv\Scripts\activate
- # If you do not have uv installed yet, install it first:
- # pip install uv
- uv pip install -r requirements.txt
+ pip3 install -r requirements.txt
```
@@ -91,7 +64,7 @@ For more LLM options, see the [supported Conversation components](https://docs.d
```
-2. **Run the application:**
+2. **Run the simple Conversation application:**
```bash
- dapr run -f .
+ source conversation/.venv/bin/activate
+ dapr run -f .
```
Expected output:
@@ -132,6 +106,52 @@ For more LLM options, see the [supported Conversation components](https://docs.d
+4. **Run the tool calling Conversation application:**
+
+
+
+ ```bash
+ source conversation/.venv/bin/activate
+ dapr run -f dapr-tool-calling.yaml
+ ```
+
+ Expected output:
+
+ ```text
+ == APP - conversation == Input sent: calculate square root of 15
+ == APP - conversation == Output response: ConversationResultAlpha2Choices(finish_reason='tool_calls', index=0, message=ConversationResultAlpha2Message(content='calculate square root of 15', tool_calls=[ConversationToolCalls(id='0', function=ConversationToolCallsOfFunction(name='calculate', arguments='expression'))]))
+ == APP - conversation == Input sent: get weather in London in celsius
+ == APP - conversation == Output response: ConversationResultAlpha2Choices(finish_reason='tool_calls', index=0, message=ConversationResultAlpha2Message(content='get weather in London in celsius', tool_calls=[ConversationToolCalls(id='0', function=ConversationToolCallsOfFunction(name='get_weather', arguments='location,unit'))]))
+ ```
+
+
+
+5. **Stop the tool calling application:**
+
+
+
+ ```bash
+ dapr stop -f dapr-tool-calling.yaml
+ ```
+
+
+
### Option 2: Using the Dapr CLI Directly
As an alternative to the multi-app template, you can run the application directly with the Dapr CLI.
@@ -165,7 +185,4 @@ As an alternative to the multi-app template, you can run the application directl
```bash
# For tool calling example
dapr run --app-id conversation --resources-path ../../../components -- python3 tool_calling.py
-
- # For tool calling with function helper example
- dapr run --app-id conversation --resources-path ../../../components -- python3 tool_calling_from_function.py
```
diff --git a/conversation/python/sdk/conversation/app.py b/conversation/python/sdk/conversation/app.py
index 4c3b7d728..db898a9f1 100644
--- a/conversation/python/sdk/conversation/app.py
+++ b/conversation/python/sdk/conversation/app.py
@@ -11,7 +11,7 @@
# limitations under the License.
# ------------------------------------------------------------
from dapr.clients import DaprClient
-from dapr.clients.grpc._request import ConversationInputAlpha2, ConversationMessage, ConversationMessageContent, ConversationMessageOfUser
+from dapr.clients.grpc.conversation import ConversationInputAlpha2, ConversationMessage, ConversationMessageContent, ConversationMessageOfUser
with DaprClient() as d:
text_input = "What is dapr?"
diff --git a/conversation/python/sdk/conversation/tool_calling.py b/conversation/python/sdk/conversation/tool_calling.py
index 8afccd623..dd7137a6b 100644
--- a/conversation/python/sdk/conversation/tool_calling.py
+++ b/conversation/python/sdk/conversation/tool_calling.py
@@ -11,13 +11,12 @@
# limitations under the License.
# ------------------------------------------------------------
from dapr.clients import DaprClient
-from dapr.clients.grpc._request import (ConversationInputAlpha2, ConversationMessage, ConversationMessageContent,
- ConversationMessageOfUser, ConversationToolsFunction, ConversationTools)
+from dapr.clients.grpc import conversation
with DaprClient() as d:
provider_component = "echo"
- function = ConversationToolsFunction(
+ function = conversation.ConversationToolsFunction(
name="calculate",
description="Perform calculations",
parameters={
@@ -31,11 +30,13 @@
"required": ["expression"]
}
)
- calc_tool = ConversationTools(function=function)
+ calc_tool = conversation.ConversationTools(function=function)
textInput = "calculate square root of 15"
inputs = [
- ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=textInput)]))],
+ conversation.ConversationInputAlpha2(messages=[
+ conversation.ConversationMessage(of_user=conversation.ConversationMessageOfUser(
+ content=[conversation.ConversationMessageContent(text=textInput)]))],
scrub_pii=True),
]
@@ -50,3 +51,38 @@
for output in response.outputs:
print(f'Output response: {output.choices[0]}')
+
+ # ------------------------------------------------------------
+ # Using higher level API helpers
+ # ------------------------------------------------------------
+
+ # Use the @tool decorator helper for tool registration. Decorated tools are automatically registered with the SDK,
+ # and their signatures are parsed into a json-schema so the LLM can understand how to call them.
+ @conversation.tool
+ def get_weather(location: str, unit: str) -> str:
+ """get weather from a location in the given unit"""
+ return f"The weather in {location} is 25 degrees {unit}."
+
+ textInput = "get weather in London in celsius"
+ # use the create_* helper functions (e.g. create_user_message, create_system_message) to build inputs easily
+ inputs = [
+ conversation.ConversationInputAlpha2(messages=[conversation.create_user_message(textInput)], scrub_pii=True),
+ ]
+
+ print(f'Input sent: {textInput}')
+
+ response = d.converse_alpha2(
+ name=provider_component,
+ inputs=inputs,
+ temperature=0.7,
+ # use get_registered_tools helper function to get all registered tools
+ tools=conversation.get_registered_tools(),
+ )
+
+ for output in response.outputs:
+ print(f'Output response: {output.choices[0]}')
+
+ # Registered tools can also be invoked directly when the LLM requests them,
+ # using conversation.execute_registered_tool:
+ # >>> print(conversation.execute_registered_tool(name="get_weather", params={"location":"London", "unit":"celsius"}))
+
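+    # A minimal dispatch sketch (assumes the choices/message shape shown in the
+    # README's expected output). Real providers return JSON-encoded arguments; the
+    # echo mock returns the argument names (e.g. 'location,unit'), so guard the parse.
+    import json
+
+    for output in response.outputs:
+        for tool_call in (output.choices[0].message.tool_calls or []):
+            try:
+                params = json.loads(tool_call.function.arguments)
+            except (TypeError, ValueError):
+                continue  # echo mock: arguments are not JSON, nothing to execute
+            print(conversation.execute_registered_tool(name=tool_call.function.name, params=params))
+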
diff --git a/conversation/python/sdk/conversation/tool_calling_from_function.py b/conversation/python/sdk/conversation/tool_calling_from_function.py
deleted file mode 100644
index 7b9703176..000000000
--- a/conversation/python/sdk/conversation/tool_calling_from_function.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# ------------------------------------------------------------
-# Copyright 2025 The Dapr Authors
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ------------------------------------------------------------
-from dapr.clients import DaprClient
-from dapr.clients.grpc._request import (ConversationInputAlpha2, ConversationMessage, ConversationMessageContent,
- ConversationMessageOfUser, ConversationToolsFunction, ConversationTools)
-
-# The automated function-to-schema converter requires typed arguments and a docstring
-# with a brief description of each argument.
-def calculate(expression: str) -> str:
- """Perform calculations.
-
- It allows for the following calculations:
- - Square root of a number.
- - Addition, subtraction, multiplication and division.
-
- It CANNOT do trigonometry calculations.
-
- Args:
- expression (str): Math expression.
- """
- return expression
-
-def execute_converse_alpha2(text_input: str, tool: ConversationTools) -> None:
-
- # disable scrubbing of PII as some numbers can be matched as phone numbers
- inputs = [
- ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=text_input)]))],
- scrub_pii=False),
- ]
-
- print(f'Input sent: {text_input}')
-
- response = d.converse_alpha2(
- name=provider_component,
- inputs=inputs,
- temperature=0.7,
- tools=[tool],
- )
-
- for output in response.outputs:
- print(f'Output response: {output.choices[0]}')
-
-with DaprClient() as d:
- provider_component = "echo" # Change to your provider component name
-
- # use the function-to-schema converter to generate the ConversationToolsFunction automatically, including the json-schema parameters section
- func = ConversationToolsFunction.from_function(calculate)
- calc_tool = ConversationTools(function=func)
-
- print(f'Function schema generated: \n Name: {func.name}\n Description: {func.description}\n Parameters (json-schema): {func.parameters}\n')
-
- print('\n----------\n')
-
- print('First call to calculator should trigger a tool call on both real and echo providers:')
-
- execute_converse_alpha2("calculate square root of 15", calc_tool)
-
- print('\n----------\n')
- print('Second call should not trigger a tool call on a real provider, but the echo provider will still echo it back as a tool call:')
-
- execute_converse_alpha2("calculate the sine of 195.33", calc_tool)
-
diff --git a/conversation/python/sdk/dapr-tool-calling.yaml b/conversation/python/sdk/dapr-tool-calling.yaml
new file mode 100644
index 000000000..9cddba263
--- /dev/null
+++ b/conversation/python/sdk/dapr-tool-calling.yaml
@@ -0,0 +1,7 @@
+version: 1
+common:
+ resourcesPath: ../../components/
+apps:
+ - appID: conversation
+ appDirPath: ./conversation/
+ command: ["python3", "tool_calling.py"]
From 217d4cec611e3b5e1059a7ee0bf7f62a58a78f71 Mon Sep 17 00:00:00 2001
From: Filinto Duran <1373693+filintod@users.noreply.github.com>
Date: Sun, 17 Aug 2025 19:48:22 -0500
Subject: [PATCH 3/3] remove real llm provider info
Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com>
---
conversation/components/ollama.yaml | 14 --------------
conversation/components/openai.yaml | 12 ------------
2 files changed, 26 deletions(-)
delete mode 100644 conversation/components/ollama.yaml
delete mode 100644 conversation/components/openai.yaml
diff --git a/conversation/components/ollama.yaml b/conversation/components/ollama.yaml
deleted file mode 100644
index ac8122076..000000000
--- a/conversation/components/ollama.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-apiVersion: dapr.io/v1alpha1
-kind: Component
-metadata:
- name: ollama
-spec:
- type: conversation.openai
- version: v1
- metadata:
- - name: key
- value: 'ollama'
- - name: model
- value: gpt-oss:20b
- - name: endpoint
- value: 'http://localhost:11434/v1' # ollama endpoint https://ollama.com/blog/openai-compatibility
diff --git a/conversation/components/openai.yaml b/conversation/components/openai.yaml
deleted file mode 100644
index 29729c641..000000000
--- a/conversation/components/openai.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-apiVersion: dapr.io/v1alpha1
-kind: Component
-metadata:
- name: openai
-spec:
- type: conversation.openai
- version: v1
- metadata:
- - name: key
- value: "YOUR_OPENAI_API_KEY"
- - name: model
- value: gpt-4o-mini-2024-07-18