-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtest_llm_availability_fix.js
More file actions
67 lines (53 loc) · 2.68 KB
/
test_llm_availability_fix.js
File metadata and controls
67 lines (53 loc) · 2.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
/**
 * Test the LLM availability fix in the frontend BOMProcessor logic
 */
const axios = require('axios');
// Backend under test — assumes the API server is running locally on port 3010.
const API_BASE_URL = 'http://localhost:3010';
// Primary LLM health endpoint (the one BOMProcessor queries first).
const LLM_STATUS_URL = `${API_BASE_URL}/api/v1/llm/status`;
// Fallback BOM-side status endpoint, used here for cross-checking.
const BOM_LLM_STATUS_URL = `${API_BASE_URL}/api/v1/bom/llm-status`;
/**
 * Exercise the fixed LLM-availability expression from BOMProcessor.
 *
 * Queries the primary /api/v1/llm/status endpoint, evaluates the fixed
 * availability logic, then queries the fallback /api/v1/bom/llm-status
 * endpoint and reports whether the two endpoints agree. All failures
 * (network errors, timeouts) are caught and logged; nothing is thrown.
 *
 * @returns {Promise<void>} resolves after all diagnostics are logged.
 */
async function testLLMAvailabilityLogic() {
  console.log('🔍 Testing LLM Availability Logic Fix...\n');
  try {
    // Test the primary endpoint
    console.log('1. Testing /api/v1/llm/status endpoint:');
    const response = await axios.get(LLM_STATUS_URL, { timeout: 10000 });
    const data = response.data;
    console.log('✅ API call successful');
    console.log('Response format:', Object.keys(data).join(', '));

    // Apply the FIXED logic from BOMProcessor line 287.
    // Optional chaining is used on EVERY nested access: the previous
    // version guarded `data.connection?.available` but read
    // `data.models.gpt_oss_120b.available` unguarded, which threw a
    // TypeError (landing in the catch block) whenever a degraded backend
    // omitted `models` — precisely the case this script should diagnose.
    // Boolean(...) normalizes the result so the later strict comparison
    // against the fallback endpoint cannot be skewed by `undefined`.
    const available = Boolean(
      data.success &&
      data.connection?.available &&
      data.models?.gpt_oss_120b?.available
    );
    console.log('\n🔧 Testing FIXED availability logic:');
    console.log('   data.success:', data.success);
    console.log('   data.connection?.available:', data.connection?.available);
    console.log('   data.models?.gpt_oss_120b?.available:', data.models?.gpt_oss_120b?.available);
    console.log('   Final result (AND of all):', available);

    if (available) {
      console.log('✅ FIXED LOGIC: AI Generation should show AVAILABLE');
    } else {
      console.log('❌ FIXED LOGIC: AI Generation will show UNAVAILABLE');
      console.log('   Debug individual conditions:');
      if (!data.success) console.log('   - API call failed');
      if (!data.connection?.available) console.log('   - Ollama service not connected');
      if (!data.models?.gpt_oss_120b?.available) console.log('   - gpt-oss:120b model not available');
    }

    // Also test the fallback endpoint for comparison
    console.log('\n2. Testing fallback /api/v1/bom/llm-status endpoint:');
    const bomResponse = await axios.get(BOM_LLM_STATUS_URL, { timeout: 10000 });
    const bomData = bomResponse.data;
    // Same normalization as above so both sides of the comparison are
    // genuine booleans, not truthy/undefined residue.
    const bomAvailable = Boolean(
      bomData.success && bomData.llm_available && bomData.status === 'operational'
    );
    console.log('   BOM endpoint availability:', bomAvailable);

    // Compare results — the two endpoints should agree; a mismatch means
    // the UI may show conflicting availability depending on which it hits.
    console.log('\n📊 Comparison:');
    console.log('   LLM endpoint result:', available);
    console.log('   BOM endpoint result:', bomAvailable);
    console.log('   Results match:', available === bomAvailable);
    if (available === bomAvailable) {
      console.log('✅ Both endpoints agree on availability status');
    } else {
      console.log('⚠️ Endpoints disagree - this might cause UI confusion');
    }
  } catch (error) {
    console.error('❌ Test failed:', error.message);
  }
}

testLLMAvailabilityLogic().catch(console.error);