Skip to content
35 changes: 35 additions & 0 deletions examples/ollama/chat-ollama-streaming.php
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's call the file only stream.php - it is more consistent with the other stream examples, and ollama is basically the folder already

Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
<?php

/*
 * This file is part of the Symfony package.
 *
 * (c) Fabien Potencier <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

use Symfony\AI\Agent\Agent;
use Symfony\AI\Platform\Bridge\Ollama\Ollama;
use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
use Symfony\AI\Platform\Message\Message;
use Symfony\AI\Platform\Message\MessageBag;

require_once dirname(__DIR__).'/bootstrap.php';

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$model = new Ollama();

$agent = new Agent($platform, $model, logger: logger());
$messages = new MessageBag(
    Message::forSystem('You are a helpful assistant.'),
    Message::ofUser('Tina has one brother and one sister. How many sisters do Tina\'s siblings have?'),
);

// Request a streamed result so chunks arrive as the model generates them
$result = $agent->call($messages, ['stream' => true]);

// Emit each chunk as it is received
foreach ($result as $chunk) {
    echo $chunk->getContent();
}

// Terminate the streamed output with a newline so the shell prompt
// does not end up glued to the last chunk.
echo \PHP_EOL;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
}
}
echo \PHP_EOL;

1 change: 1 addition & 0 deletions src/platform/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,5 +60,6 @@ CHANGELOG
* Add InMemoryPlatform and InMemoryRawResult for testing Platform without external Providers calls
* Add tool calling support for Ollama platform
* Allow beta feature flags to be passed into Anthropic model options
* Add Ollama streaming output support


60 changes: 60 additions & 0 deletions src/platform/src/Bridge/Ollama/OllamaMessageChunk.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
<?php

/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/

namespace Symfony\AI\Platform\Bridge\Ollama;

/**
 * Value object representing a single chunk of an Ollama streaming response.
 *
 * @author Shaun Johnston <[email protected]>
 */
final readonly class OllamaMessageChunk
{
    /**
     * @param string               $model      name of the model that produced this chunk
     * @param \DateTimeImmutable   $created_at timestamp reported by Ollama for this chunk
     * @param array<string, mixed> $message    raw "message" payload, typically "role" and "content" keys
     * @param bool                 $done       whether this is the final chunk of the stream
     */
    public function __construct(
        public string $model,
        public \DateTimeImmutable $created_at,
        public array $message,
        public bool $done,
    ) {
    }

    public function __toString(): string
    {
        // Return the assistant's message content if available
        return $this->message['content'] ?? '';
    }

    /**
     * Parses a single JSON line of the Ollama streaming protocol.
     *
     * Returns null instead of throwing when the payload is not valid JSON,
     * not a JSON object, or carries an unparsable "created_at" timestamp.
     */
    public static function fromJsonString(string $json): ?self
    {
        $data = json_decode($json, true);
        if (!\is_array($data)) {
            return null;
        }

        try {
            // A missing "created_at" falls back to "now"; a malformed value
            // would make the constructor throw, which must map to null here.
            $createdAt = new \DateTimeImmutable($data['created_at'] ?? 'now');
        } catch (\Exception) {
            return null;
        }

        return new self(
            $data['model'] ?? '',
            $createdAt,
            $data['message'] ?? [],
            $data['done'] ?? false,
        );
    }

    public function getContent(): ?string
    {
        return $this->message['content'] ?? null;
    }

    public function getRole(): ?string
    {
        return $this->message['role'] ?? null;
    }
}
23 changes: 23 additions & 0 deletions src/platform/src/Bridge/Ollama/OllamaResultConverter.php
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,17 @@
use Symfony\AI\Platform\Model;
use Symfony\AI\Platform\Result\RawResultInterface;
use Symfony\AI\Platform\Result\ResultInterface;
use Symfony\AI\Platform\Result\StreamResult;
use Symfony\AI\Platform\Result\TextResult;
use Symfony\AI\Platform\Result\ToolCall;
use Symfony\AI\Platform\Result\ToolCallResult;
use Symfony\AI\Platform\Result\VectorResult;
use Symfony\AI\Platform\ResultConverterInterface;
use Symfony\AI\Platform\Vector\Vector;
use Symfony\Component\HttpClient\Chunk\FirstChunk;
use Symfony\Component\HttpClient\Chunk\LastChunk;
use Symfony\Component\HttpClient\EventSourceHttpClient;
use Symfony\Contracts\HttpClient\ResponseInterface;

/**
* @author Christopher Hertel <[email protected]>
Expand All @@ -34,6 +39,10 @@ public function supports(Model $model): bool

public function convert(RawResultInterface $result, array $options = []): ResultInterface
{
if ($options['stream'] ?? false) {
return new StreamResult($this->convertStream($result->getObject()));
}

$data = $result->getData();

return \array_key_exists('embeddings', $data)
Expand Down Expand Up @@ -83,4 +92,18 @@ public function doConvertEmbeddings(array $data): ResultInterface
),
);
}

/**
 * Lazily converts the SSE stream of an Ollama response into message chunks.
 *
 * Transport-level first/last chunks carry no payload and are skipped, as is
 * any line that cannot be parsed into an OllamaMessageChunk.
 */
private function convertStream(ResponseInterface $result): \Generator
{
    $sseClient = new EventSourceHttpClient();

    foreach ($sseClient->stream($result) as $chunk) {
        if ($chunk instanceof FirstChunk) {
            continue;
        }
        if ($chunk instanceof LastChunk) {
            continue;
        }

        $message = OllamaMessageChunk::fromJsonString($chunk->getContent());
        if (null !== $message) {
            yield $message;
        }
    }
}
}
105 changes: 105 additions & 0 deletions src/platform/tests/Bridge/Ollama/OllamaClientTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,12 @@
use PHPUnit\Framework\TestCase;
use Symfony\AI\Platform\Bridge\Ollama\Ollama;
use Symfony\AI\Platform\Bridge\Ollama\OllamaClient;
use Symfony\AI\Platform\Bridge\Ollama\OllamaMessageChunk;
use Symfony\AI\Platform\Bridge\Ollama\OllamaResultConverter;
use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
use Symfony\AI\Platform\Model;
use Symfony\AI\Platform\Result\RawHttpResult;
use Symfony\AI\Platform\Result\StreamResult;
use Symfony\Component\HttpClient\MockHttpClient;
use Symfony\Component\HttpClient\Response\JsonMockResponse;
use Symfony\Component\HttpClient\Response\MockResponse;

#[CoversClass(OllamaClient::class)]
#[UsesClass(Ollama::class)]
Expand Down Expand Up @@ -87,4 +90,106 @@ public function testOutputStructureIsSupported()
'done' => true,
], $response->getData());
}

public function testStreamingIsSupported()
{
    // First response answers the capability probe; the second simulates
    // the server-sent-events stream returned by the chat endpoint.
    $capabilityResponse = new JsonMockResponse([
        'capabilities' => ['completion'],
    ]);

    $streamPayload = json_encode([
        'model' => 'llama3.2',
        'created_at' => '2025-08-23T10:00:00Z',
        'message' => ['role' => 'assistant', 'content' => 'Hello world'],
        'done' => true,
    ]);
    $streamResponse = new MockResponse('data: '.$streamPayload."\n\n", [
        'response_headers' => [
            'content-type' => 'text/event-stream',
        ],
    ]);

    $httpClient = new MockHttpClient([$capabilityResponse, $streamResponse], 'http://127.0.0.1:1234');
    $platform = PlatformFactory::create('http://127.0.0.1:1234', $httpClient);

    $response = $platform->invoke(new Ollama(), [
        'messages' => [
            [
                'role' => 'user',
                'content' => 'Say hello world',
            ],
        ],
        'model' => 'llama3.2',
    ], [
        'stream' => true,
    ]);

    $result = $response->getResult();

    $this->assertInstanceOf(StreamResult::class, $result);
    $this->assertInstanceOf(\Generator::class, $result->getContent());
    $this->assertSame(2, $httpClient->getRequestsCount());
}

public function testStreamingConverterWithDirectResponse()
{
    $converter = new OllamaResultConverter();

    // Two SSE events: a partial chunk followed by the terminating one.
    $streamingData = 'data: '.json_encode([
        'model' => 'llama3.2',
        'created_at' => '2025-08-23T10:00:00Z',
        'message' => ['role' => 'assistant', 'content' => 'Hello'],
        'done' => false,
    ])."\n\n".
        'data: '.json_encode([
            'model' => 'llama3.2',
            'created_at' => '2025-08-23T10:00:01Z',
            'message' => ['role' => 'assistant', 'content' => ' world'],
            'done' => true,
        ])."\n\n";

    $mockHttpClient = new MockHttpClient([
        new MockResponse($streamingData, [
            'response_headers' => [
                'content-type' => 'text/event-stream',
            ],
        ]),
    ]);

    $mockResponse = $mockHttpClient->request('GET', 'http://test.example');
    $result = $converter->convert(new RawHttpResult($mockResponse), ['stream' => true]);

    // With the "stream" option the converter must return a lazy StreamResult.
    $this->assertInstanceOf(StreamResult::class, $result);
    $this->assertInstanceOf(\Generator::class, $result->getContent());

    // Without the "stream" option the very same converter must not stream.
    $regularMockHttpClient = new MockHttpClient([
        new JsonMockResponse([
            'model' => 'llama3.2',
            'message' => ['role' => 'assistant', 'content' => 'Hello world'],
            'done' => true,
        ]),
    ]);

    $regularMockResponse = $regularMockHttpClient->request('GET', 'http://test.example');
    $regularResult = $converter->convert(new RawHttpResult($regularMockResponse), ['stream' => false]);

    $this->assertNotInstanceOf(StreamResult::class, $regularResult);
}

public function testOllamaMessageChunkParsing()
{
    $jsonData = json_encode([
        'model' => 'llama3.2',
        'created_at' => '2025-08-23T10:00:00Z',
        'message' => ['role' => 'assistant', 'content' => 'Hello world'],
        'done' => true,
    ]);

    $chunk = OllamaMessageChunk::fromJsonString($jsonData);

    $this->assertNotNull($chunk);
    $this->assertSame('Hello world', (string) $chunk);
    $this->assertSame('Hello world', $chunk->getContent());
    $this->assertSame('assistant', $chunk->getRole());
    $this->assertSame('llama3.2', $chunk->model);
    $this->assertTrue($chunk->done);

    // Invalid payloads must map to null, never throw.
    $this->assertNull(OllamaMessageChunk::fromJsonString('not valid json'));
}
}