
Commit c68c093

feat: bring in Llama 3.3 support and example on Azure

1 parent 2ad9c25 commit c68c093

5 files changed, +119 -0 lines changed

.env

Lines changed: 4 additions & 0 deletions
@@ -21,6 +21,10 @@ AZURE_OPENAI_DEPLOYMENT=
 AZURE_OPENAI_VERSION=
 AZURE_OPENAI_KEY=
 
+# For using Llama on Azure
+AZURE_LLAMA_BASEURL=
+AZURE_LLAMA_KEY=
+
 # For using OpenRouter
 OPENROUTER_KEY=
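Given that LlamaHandler (added below) builds the request URL as sprintf('https://%s/chat/completions', $baseUrl), AZURE_LLAMA_BASEURL is presumably the bare hostname of the Azure-hosted Llama endpoint (no scheme, no path), and AZURE_LLAMA_KEY is sent verbatim in the Authorization header. A hypothetical filled-in example, with placeholder values only:

# placeholders for illustration, not real values
AZURE_LLAMA_BASEURL=my-llama-endpoint.eastus2.models.ai.azure.com
AZURE_LLAMA_KEY=<azure-api-key>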

examples/chat-llama-azure.php

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
<?php

use PhpLlm\LlmChain\Bridge\Meta\Llama;
use PhpLlm\LlmChain\Bridge\Azure\Meta\PlatformFactory;
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;

require_once dirname(__DIR__).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__).'/.env');

if (empty($_ENV['AZURE_LLAMA_BASEURL']) || empty($_ENV['AZURE_LLAMA_KEY'])) {
    echo 'Please set the AZURE_LLAMA_BASEURL and AZURE_LLAMA_KEY environment variables.'.PHP_EOL;
    exit(1);
}

$platform = PlatformFactory::create($_ENV['AZURE_LLAMA_BASEURL'], $_ENV['AZURE_LLAMA_KEY']);
$llm = new Llama(Llama::LLAMA_3_3_70B_INSTRUCT);

$chain = new Chain($platform, $llm);
$messages = new MessageBag(Message::ofUser('I am going to Paris, what should I see?'));
$response = $chain->call($messages, [
    'max_tokens' => 2048,
    'temperature' => 0.8,
    'top_p' => 0.1,
    'presence_penalty' => 0,
    'frequency_penalty' => 0,
]);

echo $response->getContent().PHP_EOL;
src/Bridge/Azure/Meta/LlamaHandler.php

Lines changed: 60 additions & 0 deletions

@@ -0,0 +1,60 @@
<?php

declare(strict_types=1);

namespace PhpLlm\LlmChain\Bridge\Azure\Meta;

use PhpLlm\LlmChain\Bridge\Meta\Llama;
use PhpLlm\LlmChain\Exception\RuntimeException;
use PhpLlm\LlmChain\Model\Message\MessageBagInterface;
use PhpLlm\LlmChain\Model\Model;
use PhpLlm\LlmChain\Model\Response\ResponseInterface as LlmResponse;
use PhpLlm\LlmChain\Model\Response\TextResponse;
use PhpLlm\LlmChain\Platform\ModelClient;
use PhpLlm\LlmChain\Platform\ResponseConverter;
use Symfony\Contracts\HttpClient\HttpClientInterface;
use Symfony\Contracts\HttpClient\ResponseInterface;
use Webmozart\Assert\Assert;

final readonly class LlamaHandler implements ModelClient, ResponseConverter
{
    public function __construct(
        private HttpClientInterface $httpClient,
        private string $baseUrl,
        #[\SensitiveParameter] private string $apiKey,
    ) {
    }

    public function supports(Model $model, object|array|string $input): bool
    {
        return $model instanceof Llama && $input instanceof MessageBagInterface;
    }

    public function request(Model $model, object|array|string $input, array $options = []): ResponseInterface
    {
        Assert::isInstanceOf($input, MessageBagInterface::class);
        $url = sprintf('https://%s/chat/completions', $this->baseUrl);

        return $this->httpClient->request('POST', $url, [
            'headers' => [
                'Content-Type' => 'application/json',
                'Authorization' => $this->apiKey,
            ],
            'json' => array_merge($options, [
                'model' => $model->getVersion(),
                'messages' => $input,
            ]),
        ]);
    }

    public function convert(ResponseInterface $response, array $options = []): LlmResponse
    {
        $data = $response->toArray();

        if (!isset($data['choices'][0]['message']['content'])) {
            throw new RuntimeException('Response does not contain output');
        }

        return new TextResponse($data['choices'][0]['message']['content']);
    }
}
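LlamaHandler implements both ModelClient (building the HTTP request) and ResponseConverter (turning the JSON body into a TextResponse); the Platform returned by PlatformFactory normally drives those two steps. A minimal sketch of the same flow driven by hand, for illustration only (it assumes the same bootstrap as examples/chat-llama-azure.php above):

use PhpLlm\LlmChain\Bridge\Azure\Meta\LlamaHandler;
use PhpLlm\LlmChain\Bridge\Meta\Llama;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use Symfony\Component\HttpClient\HttpClient;

$handler = new LlamaHandler(HttpClient::create(), $_ENV['AZURE_LLAMA_BASEURL'], $_ENV['AZURE_LLAMA_KEY']);
$model = new Llama(Llama::LLAMA_3_3_70B_INSTRUCT);
$messages = new MessageBag(Message::ofUser('Hello!'));

// supports() gates the handler to Llama models with a MessageBag input.
if ($handler->supports($model, $messages)) {
    // request() POSTs to https://{AZURE_LLAMA_BASEURL}/chat/completions with the merged options...
    $httpResponse = $handler->request($model, $messages, ['max_tokens' => 256]);
    // ...and convert() extracts choices[0].message.content into a TextResponse.
    echo $handler->convert($httpResponse)->getContent().PHP_EOL;
}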
src/Bridge/Azure/Meta/PlatformFactory.php

Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
<?php

declare(strict_types=1);

namespace PhpLlm\LlmChain\Bridge\Azure\Meta;

use PhpLlm\LlmChain\Platform;
use Symfony\Component\HttpClient\HttpClient;
use Symfony\Contracts\HttpClient\HttpClientInterface;

final readonly class PlatformFactory
{
    public static function create(
        string $baseUrl,
        #[\SensitiveParameter]
        string $apiKey,
        ?HttpClientInterface $httpClient = null,
    ): Platform {
        $modelClient = new LlamaHandler($httpClient ?? HttpClient::create(), $baseUrl, $apiKey);

        return new Platform([$modelClient], [$modelClient]);
    }
}
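The optional third argument lets callers pass their own Symfony HTTP client; when omitted, HttpClient::create() is used. A minimal sketch (not from this commit; the timeout value is illustrative) of injecting a preconfigured client:

use PhpLlm\LlmChain\Bridge\Azure\Meta\PlatformFactory;
use Symfony\Component\HttpClient\HttpClient;

// Illustrative only: a client with a longer timeout, reused by LlamaHandler for every request.
$httpClient = HttpClient::create(['timeout' => 60]);
$platform = PlatformFactory::create(
    $_ENV['AZURE_LLAMA_BASEURL'],
    $_ENV['AZURE_LLAMA_KEY'],
    $httpClient,
);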

src/Bridge/Meta/Llama.php

Lines changed: 1 addition & 0 deletions
@@ -8,6 +8,7 @@
 
 final readonly class Llama implements LanguageModel
 {
+    public const LLAMA_3_3_70B_INSTRUCT = 'llama-3.3-70B-Instruct';
     public const LLAMA_3_2_90B_VISION_INSTRUCT = 'llama-3.2-90b-vision-instruct';
     public const LLAMA_3_2_11B_VISION_INSTRUCT = 'llama-3.2-11b-vision-instruct';
     public const LLAMA_3_2_3B = 'llama-3.2-3b';