<?php

namespace KupShop\LLMBundle\LlmProviders;

use GuzzleHttp\Psr7\Utils;
use KupShop\LLMBundle\Dto\PromptResponseStats;
use KupShop\LLMBundle\Dto\TextPromptInput;
use KupShop\LLMBundle\Exception\InvalidInputImageException;
use LLPhant\Chat\Message;
use LLPhant\Chat\Vision\ImageSource;
use LLPhant\Chat\Vision\VisionMessage;
use OpenAI\Exceptions\ErrorException;
use Psr\Http\Message\StreamInterface;
use Symfony\Component\DependencyInjection\Attribute\Autowire;

/**
 * LLM provider backed by the OpenAI chat API (via the LLPhant client).
 *
 * Supports plain and streamed completions, inline image placeholders of the
 * form {image:URL} (converted to vision messages), and usage-stat collection.
 */
class OpenAIProvider extends AbstractLlmProvider
{
    public const LABEL = 'openAI';

    /** Model identifier sent to the API; overridable at runtime via setModel(). */
    private string $model = 'gpt-4.1-mini';

    public function __construct(#[Autowire('%kupshop.llm.token.openai%')] protected string $token)
    {
    }

    /**
     * Generates a complete (non-streamed) chat response for the prompt.
     *
     * API errors and invalid image placeholders are deliberately not re-thrown:
     * their message is returned as the response text instead, so callers always
     * receive a string.
     */
    public function getResponse(TextPromptInput $prompt): string
    {
        $chat = $this->createChat();
        try {
            $output = $chat->generateChat($this->preparePrompt($prompt->getMessages()));
        } catch (ErrorException|InvalidInputImageException $e) {
            return $e->getMessage();
        }
        // Stats are only collected on success; an error path never reaches here.
        $this->collectStats($chat->getLastResponse()->toArray());

        return $output;
    }

    /**
     * Generates a streamed chat response for the prompt.
     *
     * On error the returned stream yields the exception message exactly once
     * and then signals EOF, so failures can be consumed like any other stream.
     * Usage stats are collected when the underlying stream reaches EOF; stat
     * collection failures are reported to Sentry but never interrupt the stream.
     */
    public function getStreamedResponse(TextPromptInput $prompt): StreamInterface
    {
        $chat = $this->createChat(stream: true);
        try {
            $response = $chat->generateChatStream($this->preparePrompt($prompt->getMessages()));
        } catch (ErrorException|InvalidInputImageException $e) {
            $message = $e->getMessage();

            // One-shot stream: the first read returns the full message, every
            // later read returns false (EOF). The by-reference capture lets the
            // closure blank out $message after the first read.
            return Utils::streamFor(function ($len) use (&$message) {
                $result = $message;
                $message = false;

                return $result ?: false;
            });
        }

        return Utils::streamFor(function ($len) use ($response, $chat) {
            if ($response->eof()) {
                try {
                    $this->collectStats($chat->getLastStreamedResponse()->toArray());
                } catch (\Throwable $e) {
                    // Best-effort stats; never let bookkeeping break the stream.
                    \Sentry\captureException($e);
                }

                return false;
            }

            return $response->read($len);
        });
    }

    /**
     * Pre-processing hook applied to messages before they are sent to the API.
     *
     * @param Message[] $messages
     *
     * @return Message[]
     */
    private function preparePrompt(array $messages): array
    {
        return $this->handlePromptImagesPlaceholders($messages);
    }

    /**
     * Extracts {image:URL} placeholders and attaches the referenced images to
     * the messages via VisionMessage.
     *
     * Each placeholder is replaced in the text by a numbered reference
     * ("(viz. N. obrázek)") and the image is downloaded and embedded as a
     * base64 source. Messages without placeholders are left untouched.
     *
     * @param Message[] $messages
     *
     * @return Message[]
     *
     * @throws InvalidInputImageException when a referenced image cannot be fetched or is invalid
     */
    public function handlePromptImagesPlaceholders(array $messages): array
    {
        foreach ($messages as $i => $message) {
            $prompt = $message->content;

            $urls = [];
            $imageIndex = 1;
            $prompt = preg_replace_callback(
                '/\{image:(.*?)\}/',
                function ($matches) use (&$imageIndex, &$urls) {
                    $urls[] = $matches[1];

                    return '(viz. '.$imageIndex++.'. obrázek)';
                },
                $prompt
            ) ?? $prompt; // on PCRE failure keep the original text instead of null

            // Drop empty placeholder URLs and reindex so a sequential list is
            // passed on (array_filter alone would leave key gaps).
            $imageSources = array_map(
                fn ($url) => new ImageSource($this->getImageBase64FromUrl($url)),
                array_values(array_filter($urls))
            );
            if (!empty($imageSources)) {
                $messages[$i] = VisionMessage::fromImages($imageSources, $prompt);
            }
        }

        return $messages;
    }

    /**
     * Downloads an image and returns its contents base64-encoded.
     *
     * On local development SSL peer verification is disabled (self-signed certs).
     *
     * @throws InvalidInputImageException when the download fails or the data is not a valid image
     */
    protected function getImageBase64FromUrl(string $url): string
    {
        if (isLocalDevelopment()) {
            $arrContextOptions = stream_context_create([
                'ssl' => [
                    'verify_peer' => false,
                    'verify_peer_name' => false,
                ],
            ]);
        }

        // Warning is suppressed; the false return is converted to an exception below.
        $imageData = @file_get_contents($url, context: $arrContextOptions ?? null);
        if ($imageData === false) {
            throw new InvalidInputImageException("Obrázek {$url} se nepodařilo stáhnout.");
        }

        // Validate that the payload really is an image before embedding it.
        $imageInfo = @getimagesizefromstring($imageData);
        if ($imageInfo === false) {
            throw new InvalidInputImageException("Obrázek {$url} není validní.");
        }

        return base64_encode($imageData);
    }

    /**
     * Records token-usage statistics from a raw API response array.
     *
     * Silently does nothing when no 'usage' section is present (e.g. an
     * incomplete or error response).
     */
    protected function collectStats(array $response): void
    {
        if (!($usage = ($response['usage'] ?? false))) {
            return;
        }

        $this->lastResponseStats = (new PromptResponseStats())
            ->setTotalTokens($usage['total_tokens'])
            ->setInputTokens($usage['prompt_tokens'])
            ->setOutputTokens($usage['completion_tokens'])
            ->setModel($response['model'] ?? '');
    }

    /**
     * Builds a configured LLPhant chat client.
     *
     * @param bool $stream when true, asks the API to include usage data in the
     *                     final streamed chunk so collectStats() can run at EOF
     */
    protected function createChat(bool $stream = false): \LLPhant\Chat\OpenAIChat
    {
        $config = new \LLPhant\OpenAIConfig();
        $config->apiKey = $this->token;
        $config->model = $this->model;
        $config->modelOptions['user'] = getShopName();
        if ($stream) {
            $config->modelOptions['stream_options'] = ['include_usage' => true];
        }

        return new \LLPhant\Chat\OpenAIChat($config);
    }

    /** Overrides the model used for subsequent requests. */
    public function setModel(string $model): void
    {
        $this->model = $model;
    }
}