save current changes

2025-08-17 17:47:54 +02:00
parent 60a5272054
commit dc4e57db2c
15 changed files with 944 additions and 68 deletions

src/lib/lmstudio.ts (new file, 115 lines)

@@ -0,0 +1,115 @@
import fs from 'fs';
import dotenv from 'dotenv';
import { logger } from './logger';
dotenv.config();
const LLM_BASE_URL = process.env.LLM_BASE_URL;
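/**
 * Sends a plain text prompt to the LM Studio chat completions endpoint and
 * returns the first JSON object found in the model's reply.
 */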
async function callLMStudio(prompt: string): Promise<any> {
  if (!LLM_BASE_URL) {
    throw new Error('LLM_BASE_URL is not defined in the .env file');
  }
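  // Retry up to 10 times; parse failures and network errors fall through to the next attempt.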
  for (let i = 0; i < 10; i++) {
    let llmResponse = "";
    try {
      const requestUrl = new URL('v1/chat/completions', LLM_BASE_URL);
      const response = await fetch(requestUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          model: 'local-model',
          messages: [
            {
              role: 'user',
              content: prompt,
            },
          ],
          temperature: 0.7,
        }),
      });
      const data = await response.json();
      if (data.choices && data.choices.length > 0) {
        const content = data.choices[0].message.content;
        llmResponse = content;
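        // Extract the first {...} block; local models often wrap the JSON in extra prose.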
        const jsonMatch = content.match(/\{[\s\S]*\}/);
        if (jsonMatch) {
          return JSON.parse(jsonMatch[0]);
        }
      } else {
        logger.error('Unexpected API response:', data);
      }
    } catch (error) {
      logger.error(`Attempt ${i + 1} failed:`, error);
      if (error instanceof TypeError && error.message.includes('fetch failed')) {
        logger.error('Could not connect to the LM Studio server. Please ensure the server is running and accessible at the specified LLM_BASE_URL.');
      }
      logger.debug(`LLM response: ${llmResponse}`);
    }
  }
  throw new Error('Failed to get response from LLM after 10 attempts');
}
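/**
 * Sends an image (inlined as a base64 data URL) together with a text prompt
 * to the LM Studio chat completions endpoint and returns the first JSON
 * object found in the model's reply.
 */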
async function callLMStudioWithFile(imagePath: string, prompt: string): Promise<any> {
  if (!LLM_BASE_URL) {
    throw new Error('LLM_BASE_URL is not defined in the .env file');
  }
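  // Read the image from disk and embed it as a base64 data URL, so no separate upload endpoint is needed.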
  const imageBuffer = fs.readFileSync(imagePath);
  const base64Image = imageBuffer.toString('base64');
  for (let i = 0; i < 10; i++) {
    let llmResponse = "";
    try {
      const requestUrl = new URL('v1/chat/completions', LLM_BASE_URL);
      const response = await fetch(requestUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          model: 'local-model',
          messages: [
            {
              role: 'user',
              content: [
                { type: 'image_url', image_url: { url: `data:image/jpeg;base64,${base64Image}` } },
                { type: 'text', text: prompt },
              ],
            },
          ],
          temperature: 0.7,
        }),
      });
      const data = await response.json();
      if (data.choices && data.choices.length > 0) {
        const content = data.choices[0].message.content;
        llmResponse = content;
        const jsonMatch = content.match(/\{[\s\S]*\}/);
        if (jsonMatch) {
          return JSON.parse(jsonMatch[0]);
        }
      } else {
        logger.error('Unexpected API response:', data);
      }
    } catch (error) {
      logger.error(`Attempt ${i + 1} failed:`, error);
      if (error instanceof TypeError && error.message.includes('fetch failed')) {
        logger.error('Could not connect to the LM Studio server. Please ensure the server is running and accessible at the specified LLM_BASE_URL.');
      }
      logger.debug(`LLM response: ${llmResponse}`);
    }
  }
  throw new Error('Failed to describe image after 10 attempts');
}
export { callLMStudio, callLMStudioWithFile };
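
A minimal usage sketch for the two exported helpers (not part of this commit): the prompt text, image path, and surrounding script are hypothetical, and it assumes the calling file lives under src/ and that LLM_BASE_URL points at a running LM Studio server.

import { callLMStudio, callLMStudioWithFile } from './lib/lmstudio';

async function main() {
  // Both helpers resolve with the parsed JSON object extracted from the model's reply.
  const summary = await callLMStudio('Summarise this text as a JSON object with a "title" key.');
  console.log(summary);

  // Hypothetical screenshot path; the image is sent inline as a base64 data URL.
  const description = await callLMStudioWithFile('./screenshot.jpg', 'Describe this image as a JSON object with a "caption" key.');
  console.log(description);
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});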