97 lines
2.8 KiB
TypeScript
97 lines
2.8 KiB
TypeScript
import { ILlmActionRequest } from './interfaces';
|
|
import { IModalContext } from '../context/ModalContext';
|
|
|
|
/** Abort reason passed to `AbortController.abort` when the user cancels an in-flight LLM request. */
export const ABORT_USER = 'user';
|
|
|
|
export function getPromptResponse(body: ILlmActionRequest) {
|
|
const abortController = new AbortController();
|
|
|
|
const promise = new Promise<string>((resolve, reject) => {
|
|
fetch('llm/prompt-action', {
|
|
method: 'POST',
|
|
body: JSON.stringify(body),
|
|
signal: abortController.signal,
|
|
})
|
|
.then((res) => res.json())
|
|
.then((res) => {
|
|
resolve(res.value);
|
|
})
|
|
.catch((reason) => {
|
|
reject(reason);
|
|
});
|
|
});
|
|
|
|
return { promise, abort: (reason?: string) => abortController.abort(reason ?? ABORT_USER) };
|
|
}
|
|
|
|
export function getPromptStreamResponse(
|
|
body: ILlmActionRequest,
|
|
chunkCallback: (chunk: string) => void,
|
|
) {
|
|
const abortController = new AbortController();
|
|
|
|
const promise = new Promise<void>((resolve, reject) => {
|
|
fetch('llm/prompt-stream-action?payload=' + encodeURIComponent(JSON.stringify(body)), {
|
|
method: 'GET', // use GET because bunny CDN is retarded and does not support streaming over POST
|
|
signal: abortController.signal,
|
|
})
|
|
.then(async (response) => {
|
|
if (!response.body) {
|
|
reject();
|
|
return;
|
|
}
|
|
const reader = response.body.getReader();
|
|
const decoder = new TextDecoder();
|
|
|
|
let responseText: string = '';
|
|
// eslint-disable-next-line no-constant-condition
|
|
while (true) {
|
|
const { done, value } = await reader.read();
|
|
if (done) break;
|
|
const chunk = decoder.decode(value, { stream: true });
|
|
responseText += chunk;
|
|
chunkCallback(responseText);
|
|
}
|
|
resolve();
|
|
})
|
|
.catch((reason) => {
|
|
reject(reason);
|
|
});
|
|
});
|
|
|
|
return { promise, abort: () => abortController.abort(ABORT_USER) };
|
|
}
|
|
|
|
/**
 * A single selectable prompt entry (id plus display title).
 *
 * NOTE(review): despite the name, this describes ONE item — collections are
 * typed as `PromptsList[]` (see `PromptDataResponse.prompts`). Renaming would
 * break external callers, so the name is kept.
 */
export interface PromptsList {
  id: number;
  title: string;
}
|
|
|
|
/**
 * Shape of the `llm/modal-data` endpoint's JSON response, consumed by
 * `getPromptData`.
 */
interface PromptDataResponse {
  title: string;
  prompt: string;
  /** Prompts available for selection — presumably; confirm against the backend. */
  prompts: PromptsList[];
}
|
|
|
|
export async function getPromptData(body: ILlmActionRequest): Promise<PromptDataResponse> {
|
|
return new Promise((resolve, reject) => {
|
|
fetch('llm/modal-data', {
|
|
method: 'POST',
|
|
body: JSON.stringify(body),
|
|
})
|
|
.then((response) => response.json())
|
|
.then((value) => resolve(value))
|
|
.catch(() => reject());
|
|
});
|
|
}
|
|
|
|
export function getModalDataRequestBody(state: IModalContext): ILlmActionRequest {
|
|
return {
|
|
promptId: state.promptId,
|
|
objectLabel: state.objectLabel,
|
|
entityType: state.entityType,
|
|
entityId: state.entityId,
|
|
text: state.targetContent,
|
|
};
|
|
}
|