
feat: add support for mistral #291


Merged · 1 commit · Jun 1, 2025
3 changes: 3 additions & 0 deletions .env
@@ -6,6 +6,9 @@ OPENAI_API_KEY=
# For using Claude on Anthropic
ANTHROPIC_API_KEY=

# For using Mistral
MISTRAL_API_KEY=

# For using Voyage
VOYAGE_API_KEY=

7 changes: 6 additions & 1 deletion README.md
@@ -68,9 +68,11 @@ $embeddings = new Embeddings();
* [Google's Gemini](https://gemini.google.com/) with [Google](https://ai.google.dev/) and [OpenRouter](https://www.openrouter.com/) as Platform
* [DeepSeek's R1](https://www.deepseek.com/) with [OpenRouter](https://www.openrouter.com/) as Platform
* [Amazon's Nova](https://nova.amazon.com) with [AWS](https://aws.amazon.com/bedrock/) as Platform
* [Mistral's Mistral](https://www.mistral.ai/) with [Mistral](https://www.mistral.ai/) as Platform
* Embeddings Models
* [OpenAI's Text Embeddings](https://platform.openai.com/docs/guides/embeddings/embedding-models) with [OpenAI](https://platform.openai.com/docs/overview) and [Azure](https://learn.microsoft.com/azure/ai-services/openai/concepts/models) as Platform
* [Voyage's Embeddings](https://docs.voyageai.com/docs/embeddings) with [Voyage](https://www.voyageai.com/) as Platform
* [Mistral Embed](https://www.mistral.ai/) with [Mistral](https://www.mistral.ai/) as Platform
* Other Models
* [OpenAI's Dall·E](https://platform.openai.com/docs/guides/image-generation) with [OpenAI](https://platform.openai.com/docs/overview) as Platform
* [OpenAI's Whisper](https://platform.openai.com/docs/guides/speech-to-text) with [OpenAI](https://platform.openai.com/docs/overview) and [Azure](https://learn.microsoft.com/azure/ai-services/openai/concepts/models) as Platform
@@ -137,6 +139,7 @@ $response = $chain->call($messages, [
1. [Meta's Llama with Replicate](examples/replicate/chat-llama.php)
1. [Google's Gemini with Google](examples/google/chat.php)
1. [Google's Gemini with OpenRouter](examples/openrouter/chat-gemini.php)
1. [Mistral's Mistral with Mistral](examples/mistral/chat.php)

### Tools

@@ -409,7 +412,7 @@ use PhpLlm\LlmChain\Platform\Message\MessageBag;

// Initialize Platform & Models

$similaritySearch = new SimilaritySearch($embeddings, $store);
$similaritySearch = new SimilaritySearch($model, $store);
$toolbox = Toolbox::create($similaritySearch);
$processor = new Chain($toolbox);
$chain = new Chain($platform, $model, [$processor], [$processor]);
@@ -547,6 +550,7 @@ needs to be used.

1. [Streaming Claude](examples/anthropic/stream.php)
1. [Streaming GPT](examples/openai/stream.php)
1. [Streaming Mistral](examples/mistral/stream.php)

### Image Processing

@@ -623,6 +627,7 @@ dump($vectors[0]->getData()); // Array of float values

1. [OpenAI's Embeddings](examples/openai/embeddings.php)
1. [Voyage's Embeddings](examples/voyage/embeddings.php)
1. [Mistral's Embed](examples/mistral/embeddings.php)

### Parallel Platform Calls

27 changes: 27 additions & 0 deletions examples/mistral/chat.php
@@ -0,0 +1,27 @@
<?php

use PhpLlm\LlmChain\Chain\Chain;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\Mistral;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Message\Message;
use PhpLlm\LlmChain\Platform\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Mistral();
$chain = new Chain($platform, $model);

$messages = new MessageBag(Message::ofUser('What is the best French cheese?'));
$response = $chain->call($messages, [
    'temperature' => 0.7,
]);

echo $response->getContent().\PHP_EOL;
28 changes: 28 additions & 0 deletions examples/mistral/embeddings.php
@@ -0,0 +1,28 @@
<?php

use PhpLlm\LlmChain\Platform\Bridge\Mistral\Embeddings;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Response\VectorResponse;
use Symfony\Component\Dotenv\Dotenv;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Embeddings();

$response = $platform->request($model, <<<TEXT
In the middle of the 20th century, food scientists began to understand the importance of vitamins and minerals in
human health. They discovered that certain nutrients were essential for growth, development, and overall well-being.
This led to the fortification of foods with vitamins and minerals, such as adding vitamin D to milk and iodine to
salt. The goal was to prevent deficiencies and promote better health in the population.
TEXT);

assert($response instanceof VectorResponse);

echo 'Dimensions: '.$response->getContent()[0]->getDimensions().\PHP_EOL;
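
As a follow-up to the dimensions check above, the vector data itself can be read the same way the README's embeddings section does; the slicing below is only an illustration, with getData() taken from the README snippet.

$vector = $response->getContent()[0];
// First few components of the embedding; getData() returns an array of floats,
// as in the README's dump($vectors[0]->getData()) example.
dump(array_slice($vector->getData(), 0, 5));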
32 changes: 32 additions & 0 deletions examples/mistral/image.php
@@ -0,0 +1,32 @@
<?php

use PhpLlm\LlmChain\Chain\Chain;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\Mistral;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Message\Content\Image;
use PhpLlm\LlmChain\Platform\Message\Message;
use PhpLlm\LlmChain\Platform\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Mistral(Mistral::MISTRAL_SMALL);
$chain = new Chain($platform, $model);

$messages = new MessageBag(
    Message::forSystem('You are an image analyzer bot that helps identify the content of images.'),
    Message::ofUser(
        'Describe the image as a comedian would do it.',
        Image::fromFile(dirname(__DIR__, 2).'/tests/Fixture/image.jpg'),
    ),
);
$response = $chain->call($messages);

echo $response->getContent().\PHP_EOL;
30 changes: 30 additions & 0 deletions examples/mistral/stream.php
@@ -0,0 +1,30 @@
<?php

use PhpLlm\LlmChain\Chain\Chain;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\Mistral;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Message\Message;
use PhpLlm\LlmChain\Platform\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Mistral();
$chain = new Chain($platform, $model);

$messages = new MessageBag(Message::ofUser('What is the eighth prime number?'));
$response = $chain->call($messages, [
    'stream' => true,
]);

foreach ($response->getContent() as $word) {
    echo $word;
}
echo \PHP_EOL;
36 changes: 36 additions & 0 deletions examples/mistral/structured-output-math.php
@@ -0,0 +1,36 @@
<?php

use PhpLlm\LlmChain\Chain\Chain;
use PhpLlm\LlmChain\Chain\StructuredOutput\ChainProcessor;
use PhpLlm\LlmChain\Chain\StructuredOutput\ResponseFormatFactory;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\Mistral;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Message\Message;
use PhpLlm\LlmChain\Platform\Message\MessageBag;
use PhpLlm\LlmChain\Tests\Fixture\StructuredOutput\MathReasoning;
use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\Serializer\Encoder\JsonEncoder;
use Symfony\Component\Serializer\Normalizer\ObjectNormalizer;
use Symfony\Component\Serializer\Serializer;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Mistral(Mistral::MISTRAL_SMALL);
$serializer = new Serializer([new ObjectNormalizer()], [new JsonEncoder()]);

$processor = new ChainProcessor(new ResponseFormatFactory(), $serializer);
$chain = new Chain($platform, $model, [$processor], [$processor]);
$messages = new MessageBag(
    Message::forSystem('You are a helpful math tutor. Guide the user through the solution step by step.'),
    Message::ofUser('how can I solve 8x + 7 = -23'),
);
$response = $chain->call($messages, ['output_structure' => MathReasoning::class]);

dump($response->getContent());
38 changes: 38 additions & 0 deletions examples/mistral/toolcall-stream.php
@@ -0,0 +1,38 @@
<?php

use PhpLlm\LlmChain\Chain\Chain;
use PhpLlm\LlmChain\Chain\Toolbox\ChainProcessor;
use PhpLlm\LlmChain\Chain\Toolbox\Tool\YouTubeTranscriber;
use PhpLlm\LlmChain\Chain\Toolbox\Toolbox;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\Mistral;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Message\Message;
use PhpLlm\LlmChain\Platform\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\HttpClient\HttpClient;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Mistral();

$transcriber = new YouTubeTranscriber(HttpClient::create());
$toolbox = Toolbox::create($transcriber);
$processor = new ChainProcessor($toolbox);
$chain = new Chain($platform, $model, [$processor], [$processor]);

$messages = new MessageBag(Message::ofUser('Please summarize this video for me: https://www.youtube.com/watch?v=6uXW-ulpj0s'));
$response = $chain->call($messages, [
    'stream' => true,
]);

foreach ($response->getContent() as $word) {
    echo $word;
}
echo \PHP_EOL;
31 changes: 31 additions & 0 deletions examples/mistral/toolcall.php
@@ -0,0 +1,31 @@
<?php

use PhpLlm\LlmChain\Chain\Chain;
use PhpLlm\LlmChain\Chain\Toolbox\ChainProcessor;
use PhpLlm\LlmChain\Chain\Toolbox\Tool\Clock;
use PhpLlm\LlmChain\Chain\Toolbox\Toolbox;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\Mistral;
use PhpLlm\LlmChain\Platform\Bridge\Mistral\PlatformFactory;
use PhpLlm\LlmChain\Platform\Message\Message;
use PhpLlm\LlmChain\Platform\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;

require_once dirname(__DIR__, 2).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__, 2).'/.env');

if (empty($_ENV['MISTRAL_API_KEY'])) {
    echo 'Please set the MISTRAL_API_KEY environment variable.'.\PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$model = new Mistral();

$toolbox = Toolbox::create(new Clock());
$processor = new ChainProcessor($toolbox);
$chain = new Chain($platform, $model, [$processor], [$processor]);

$messages = new MessageBag(Message::ofUser('What time is it?'));
$response = $chain->call($messages);

echo $response->getContent().\PHP_EOL;
17 changes: 17 additions & 0 deletions src/Platform/Bridge/Mistral/Contract/ToolNormalizer.php
@@ -0,0 +1,17 @@
<?php

declare(strict_types=1);

namespace PhpLlm\LlmChain\Platform\Bridge\Mistral\Contract;

use PhpLlm\LlmChain\Platform\Contract\Normalizer\ToolNormalizer as BaseToolNormalizer;

class ToolNormalizer extends BaseToolNormalizer
{
    public function normalize(mixed $data, ?string $format = null, array $context = []): array
    {
        $array = parent::normalize($data, $format, $context);

        $array['function']['parameters'] ??= ['type' => 'object'];

        return $array;
    }
}
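
The override above guarantees that every serialized tool carries a parameters schema, falling back to an empty object type for tools without arguments (presumably a requirement of Mistral's function-calling endpoint). A minimal sketch of the resulting shape for an argument-less tool; the 'clock' name and description are hypothetical, and the surrounding structure is assumed from the base ToolNormalizer:

// Hypothetical normalized payload for a tool without arguments (not part of this PR).
$normalized = [
    'type' => 'function',
    'function' => [
        'name' => 'clock',
        'description' => 'Returns the current date and time.',
        // Added by the Mistral bridge when the tool defines no parameters:
        'parameters' => ['type' => 'object'],
    ],
];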
23 changes: 23 additions & 0 deletions src/Platform/Bridge/Mistral/Embeddings.php
@@ -0,0 +1,23 @@
<?php

declare(strict_types=1);

namespace PhpLlm\LlmChain\Platform\Bridge\Mistral;

use PhpLlm\LlmChain\Platform\Capability;
use PhpLlm\LlmChain\Platform\Model;

final class Embeddings extends Model
{
    public const MISTRAL_EMBED = 'mistral-embed';

    /**
     * @param array<string, mixed> $options
     */
    public function __construct(
        string $name = self::MISTRAL_EMBED,
        array $options = [],
    ) {
        parent::__construct($name, [Capability::INPUT_MULTIPLE], $options);
    }
}
44 changes: 44 additions & 0 deletions src/Platform/Bridge/Mistral/Embeddings/ModelClient.php
@@ -0,0 +1,44 @@
<?php

declare(strict_types=1);

namespace PhpLlm\LlmChain\Platform\Bridge\Mistral\Embeddings;

use PhpLlm\LlmChain\Platform\Bridge\Mistral\Embeddings;
use PhpLlm\LlmChain\Platform\Model;
use PhpLlm\LlmChain\Platform\ModelClientInterface;
use Symfony\Component\HttpClient\EventSourceHttpClient;
use Symfony\Contracts\HttpClient\HttpClientInterface;
use Symfony\Contracts\HttpClient\ResponseInterface;

final readonly class ModelClient implements ModelClientInterface
{
    private EventSourceHttpClient $httpClient;

    public function __construct(
        HttpClientInterface $httpClient,
        #[\SensitiveParameter]
        private string $apiKey,
    ) {
        $this->httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient);
    }

    public function supports(Model $model): bool
    {
        return $model instanceof Embeddings;
    }

    public function request(Model $model, array|string $payload, array $options = []): ResponseInterface
    {
        return $this->httpClient->request('POST', 'https://api.mistral.ai/v1/embeddings', [
            'auth_bearer' => $this->apiKey,
            'headers' => [
                'Content-Type' => 'application/json',
            ],
            'json' => array_merge($options, [
                'model' => $model->getName(),
                'input' => $payload,
            ]),
        ]);
    }
}
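
Since the Embeddings model declares Capability::INPUT_MULTIPLE and request() forwards the payload as the input field, batching several strings in one call should work. A hedged sketch, assuming the platform accepts an array payload as the signature above suggests:

// Assumed batch usage; mirrors examples/mistral/embeddings.php, the array payload is an assumption.
$platform = PlatformFactory::create($_ENV['MISTRAL_API_KEY']);
$response = $platform->request(new Embeddings(), [
    'The quick brown fox jumps over the lazy dog.',
    'Mistral Embed returns one vector per input string.',
]);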