new musicspot_generator/v2 files

2025-09-23 20:19:59 +02:00
parent 5d1cbf8c09
commit d74ad1b034
7 changed files with 5115 additions and 21 deletions


@@ -4,25 +4,27 @@ import { logger } from './logger';
 dotenv.config();
-const LLM_BASE_URL = process.env.LLM_BASE_URL;
+const LMSTUDIO_BASE_URL = process.env.LMSTUDIO_BASE_URL;
+const LMSTUDIO_API_KEY = process.env.LMSTUDIO_API_KEY;
+const LMSTUDIO_MODEL = process.env.LMSTUDIO_MODEL;
-async function callLMStudio(prompt: string): Promise<any> {
-  if (!LLM_BASE_URL) {
-    throw new Error('LLM_BASE_URL is not defined in the .env file');
+async function callLmstudio(prompt: string): Promise<any> {
+  if (!LMSTUDIO_BASE_URL) {
+    throw new Error('LMSTUDIO_BASE_URL is not defined in the .env file');
   }
   for (let i = 0; i < 10; i++) {
     let llmResponse = "";
     try {
-      const requestUrl = new URL('v1/chat/completions', LLM_BASE_URL);
-      const response = await fetch(requestUrl, {
+      const response = await fetch(`${LMSTUDIO_BASE_URL}/chat/completions`, {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
+          'Authorization': `Bearer ${LMSTUDIO_API_KEY}`,
         },
         body: JSON.stringify({
-          model: 'local-model',
+          model: LMSTUDIO_MODEL,
           messages: [
             {
               role: 'user',
@@ -40,15 +42,19 @@ async function callLMStudio(prompt: string): Promise<any> {
         const jsonMatch = content.match(/\{[\s\S]*\}/);
         if (jsonMatch) {
           return JSON.parse(jsonMatch[0]);
+        } else {
+          const arrayMatch = content.match(/\[[\s\S]*\]/);
+          if (arrayMatch) {
+            return JSON.parse(arrayMatch[0]);
+          }
         }
+        // If no JSON/array found, return the raw content
+        return content;
       } else {
         logger.error('Unexpected API response:', data);
       }
     } catch (error) {
       logger.error(`Attempt ${i + 1} failed:`, error);
       if (error instanceof TypeError && error.message.includes('fetch failed')) {
         logger.error('Could not connect to the LM Studio server. Please ensure the server is running and accessible at the specified LLM_BASE_URL.');
       }
       logger.debug(`LLM response: ${llmResponse}`)
     }
   }
@@ -56,9 +62,9 @@ async function callLMStudio(prompt: string): Promise<any> {
   throw new Error('Failed to get response from LLM after 10 attempts');
 }
-async function callLMStudioWithFile(imagePath: string, prompt: string): Promise<any> {
-  if (!LLM_BASE_URL) {
-    throw new Error('LLM_BASE_URL is not defined in the .env file');
+async function callLMStudioAPIWithFile(imagePath: string, prompt: string): Promise<any> {
+  if (!LMSTUDIO_BASE_URL) {
+    throw new Error('LMSTUDIO_BASE_URL is not defined in the .env file');
   }
   const imageBuffer = fs.readFileSync(imagePath);
@@ -68,14 +74,14 @@ async function callLMStudioWithFile(imagePath: string, prompt: string): Promise<
     let llmResponse = "";
     try {
-      const requestUrl = new URL('v1/chat/completions', LLM_BASE_URL);
-      const response = await fetch(requestUrl, {
+      const response = await fetch(`${LMSTUDIO_BASE_URL}/chat/completions`, {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
+          'Authorization': `Bearer ${LMSTUDIO_API_KEY}`,
         },
         body: JSON.stringify({
-          model: 'local-model',
+          model: LMSTUDIO_MODEL,
           messages: [
             {
               role: 'user',
@@ -96,15 +102,17 @@ async function callLMStudioWithFile(imagePath: string, prompt: string): Promise<
         const jsonMatch = content.match(/\{[\s\S]*\}/);
         if (jsonMatch) {
           return JSON.parse(jsonMatch[0]);
+        } else {
+          const arrayMatch = content.match(/\[[\s\S]*\]/);
+          if (arrayMatch) {
+            return JSON.parse(arrayMatch[0]);
+          }
         }
       } else {
         logger.error('Unexpected API response:', data);
       }
     } catch (error) {
       logger.error(`Attempt ${i + 1} failed:`, error);
       if (error instanceof TypeError && error.message.includes('fetch failed')) {
         logger.error('Could not connect to the LM Studio server. Please ensure the server is running and accessible at the specified LLM_BASE_URL.');
       }
       logger.debug(`LLM response: ${llmResponse}`)
     }
   }
@@ -112,4 +120,4 @@ async function callLMStudioWithFile(imagePath: string, prompt: string): Promise<
   throw new Error('Failed to describe image after 10 attempts');
 }
-export { callLMStudio, callLMStudioWithFile };
+export { callLmstudio, callLMStudioAPIWithFile };
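
For context on how the refactored client is meant to be consumed, here is a minimal usage sketch, assuming the module above is importable as './lmstudio' (hypothetical path) and that LMSTUDIO_BASE_URL already includes the '/v1' prefix, since the new code appends '/chat/completions' directly instead of resolving 'v1/chat/completions' against the base URL as before. The environment variable names come from the diff; the values and prompts below are placeholders, not part of the commit.

// Assumed .env (placeholder values):
// LMSTUDIO_BASE_URL=http://localhost:1234/v1
// LMSTUDIO_API_KEY=lm-studio
// LMSTUDIO_MODEL=local-model-name

import { callLmstudio, callLMStudioAPIWithFile } from './lmstudio'; // hypothetical module path

async function demo(): Promise<void> {
  // Text-only call: returns a parsed object or array when the model emits one,
  // otherwise the raw string content (the fallback added in this commit).
  const spot = await callLmstudio('Return a JSON object describing a 30-second music spot.');
  console.log(spot);

  // Image + prompt call: the file is read from disk and sent along with the prompt.
  const description = await callLMStudioAPIWithFile('./image.jpg', 'Describe this image.');
  console.log(description);
}

demo().catch((err) => console.error(err));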