Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
162 changes: 157 additions & 5 deletions app.js
Original file line number Diff line number Diff line change
Expand Up @@ -1913,6 +1913,11 @@ function setupEventListeners() {
});
});

// Ollama detect models button
document.getElementById('ollama-detect-btn').addEventListener('click', async () => {
await detectOllamaModels();
});

document.getElementById('settings-modal').addEventListener('click', (e) => {
if (e.target.id === 'settings-modal') {
document.getElementById('settings-modal').classList.remove('visible');
Expand Down Expand Up @@ -2928,9 +2933,10 @@ async function aiTranslateAll() {
// Get selected provider and API key
const provider = getSelectedProvider();
const providerConfig = llmProviders[provider];
const apiKey = localStorage.getItem(providerConfig.storageKey);
const apiKey = providerConfig.storageKey ? localStorage.getItem(providerConfig.storageKey) : null;

if (!apiKey) {
// Ollama doesn't need an API key, other providers do
if (!providerConfig.isLocal && !apiKey) {
setTranslateStatus(`Add your LLM API key in Settings to use AI translation.`, 'error');
return;
}
Expand Down Expand Up @@ -2985,6 +2991,8 @@ Translate to these language codes: ${targetLangs.join(', ')}`;
responseText = await translateWithOpenAI(apiKey, prompt);
} else if (provider === 'google') {
responseText = await translateWithGoogle(apiKey, prompt);
} else if (provider === 'ollama') {
responseText = await translateWithOllama(prompt);
}

// Clean up response - remove markdown code blocks if present
Expand All @@ -3011,7 +3019,14 @@ Translate to these language codes: ${targetLangs.join(', ')}`;
console.error('Translation error:', error);

if (error.message === 'Failed to fetch') {
setTranslateStatus('Connection failed. Check your API key in Settings.', 'error');
const provider = getSelectedProvider();
if (provider === 'ollama') {
setTranslateStatus('Connection failed. Is Ollama running? Check Settings.', 'error');
} else {
setTranslateStatus('Connection failed. Check your API key in Settings.', 'error');
}
} else if (error.message === 'OLLAMA_MODEL_NOT_FOUND') {
setTranslateStatus('Model not found. Pull it first with: ollama pull <model>', 'error');
} else if (error.message === 'AI_UNAVAILABLE' || error.message.includes('401') || error.message.includes('403')) {
setTranslateStatus('Invalid API key. Update it in Settings (gear icon).', 'error');
} else {
Expand Down Expand Up @@ -3241,9 +3256,10 @@ async function translateAllText() {
// Get selected provider and API key
const provider = getSelectedProvider();
const providerConfig = llmProviders[provider];
const apiKey = localStorage.getItem(providerConfig.storageKey);
const apiKey = providerConfig.storageKey ? localStorage.getItem(providerConfig.storageKey) : null;

if (!apiKey) {
// Ollama doesn't need an API key, other providers do
if (!providerConfig.isLocal && !apiKey) {
await showAppAlert('Add your LLM API key in Settings to use AI translation.', 'error');
return;
}
Expand Down Expand Up @@ -3381,6 +3397,8 @@ Translate to these language codes: ${targetLangs.join(', ')}`;
responseText = await translateWithOpenAI(apiKey, prompt);
} else if (provider === 'google') {
responseText = await translateWithGoogle(apiKey, prompt);
} else if (provider === 'ollama') {
responseText = await translateWithOllama(prompt);
}

updateStatus('Processing response...', 'Parsing translations');
Expand Down Expand Up @@ -3534,6 +3552,32 @@ async function translateWithGoogle(apiKey, prompt) {
return data.candidates[0].content.parts[0].text;
}

/**
 * Translate a prompt using a locally running Ollama server.
 * Uses the non-streaming /api/chat endpoint with the model selected in Settings.
 * @param {string} prompt - The full translation prompt to send to the model
 * @returns {Promise<string>} - The model's response text
 * @throws {Error} 'OLLAMA_MODEL_NOT_FOUND' when the model is not pulled locally,
 *                 'OLLAMA_INVALID_RESPONSE' when the response shape is unexpected,
 *                 or a generic error for other non-OK HTTP statuses
 */
async function translateWithOllama(prompt) {
    const model = getSelectedModel('ollama');
    // Strip trailing slashes so a URL like "http://localhost:11434/" doesn't
    // produce "http://localhost:11434//api/chat".
    const baseUrl = getOllamaUrl().replace(/\/+$/, '');

    const response = await fetch(`${baseUrl}/api/chat`, {
        method: "POST",
        headers: {
            "Content-Type": "application/json"
        },
        body: JSON.stringify({
            model: model,
            messages: [{ role: "user", content: prompt }],
            stream: false
        })
    });

    if (!response.ok) {
        const status = response.status;
        // 404 from /api/chat means the requested model has not been pulled.
        if (status === 404) throw new Error('OLLAMA_MODEL_NOT_FOUND');
        throw new Error(`Ollama request failed: ${status}. Make sure Ollama is running.`);
    }

    const data = await response.json();
    // Guard against malformed responses (missing message or non-string content)
    // instead of letting a TypeError surface to the caller.
    if (!data || !data.message || typeof data.message.content !== 'string') {
        throw new Error('OLLAMA_INVALID_RESPONSE');
    }

    return data.message.content;
}

function setTranslateStatus(message, type) {
const status = document.getElementById('ai-translate-status');
status.textContent = message;
Expand All @@ -3555,6 +3599,17 @@ function openSettingsModal() {

// Load all saved API keys and models
Object.entries(llmProviders).forEach(([provider, config]) => {
// Handle Ollama specially (no API key, has URL)
if (config.isLocal) {
const urlInput = document.getElementById('settings-ollama-url');

if (urlInput) {
urlInput.value = localStorage.getItem('ollamaUrl') || config.defaultUrl;
}
// Model dropdown will be populated by detectOllamaModels()
return;
}

const savedKey = localStorage.getItem(config.storageKey);
const input = document.getElementById(`settings-api-key-${provider}`);
if (input) {
Expand Down Expand Up @@ -3591,6 +3646,82 @@ function updateProviderSection(provider) {
document.querySelectorAll('.settings-api-section').forEach(section => {
section.style.display = section.dataset.provider === provider ? 'block' : 'none';
});

// Auto-detect Ollama models when switching to Ollama
if (provider === 'ollama') {
detectOllamaModels();
}
Comment on lines +3650 to +3653
Copy link

Copilot AI Jan 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Auto-detecting models when switching to Ollama could create a race condition. If a user quickly switches between providers or closes the settings modal before detection completes, the async detectOllamaModels() call could update UI elements that no longer exist or are in an unexpected state. Consider checking if the modal is still open and the Ollama section is still visible before updating the UI in the detectOllamaModels function.

Copilot uses AI. Check for mistakes.
}

/**
 * Detect and populate available Ollama models.
 * Validates the user-entered server URL, queries the server via
 * fetchOllamaModels(), fills the model dropdown, and reports progress
 * and results in the settings status line.
 */
async function detectOllamaModels() {
    const btn = document.getElementById('ollama-detect-btn');
    const select = document.getElementById('settings-model-ollama');
    const status = document.getElementById('settings-key-status-ollama');
    const urlInput = document.getElementById('settings-ollama-url');

    const rawUrl = urlInput.value.trim();
    let baseUrl = rawUrl || llmProviders.ollama.defaultUrl;

    // Validate any user-provided URL before making a network request;
    // only http(s) origins are accepted.
    if (rawUrl) {
        try {
            const parsed = new URL(rawUrl);
            if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
                throw new Error('Invalid protocol');
            }
            // Normalize to the origin so we always have a clean base URL
            // (no trailing slash or path).
            baseUrl = parsed.origin;
        } catch (e) {
            status.textContent = 'Invalid Ollama URL. Please use a valid http:// or https:// address.';
            status.className = 'settings-key-status error';
            return;
        }
    }

    // Show loading state
    btn.disabled = true;
    btn.innerHTML = `
        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" style="animation: spin 1s linear infinite;">
            <path d="M23 4v6h-6M1 20v-6h6"/>
            <path d="M3.51 9a9 9 0 0114.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0020.49 15"/>
        </svg>
        Detecting...
    `;

    try {
        const models = await fetchOllamaModels(baseUrl);

        if (models.length === 0) {
            select.innerHTML = '<option value="">No models found</option>';
            status.textContent = 'No models found. Pull a model with: ollama pull llama3.2';
            status.className = 'settings-key-status error';
        } else {
            // Keep the previously saved model selected after a refresh
            const savedModel = localStorage.getItem(llmProviders.ollama.modelStorageKey) || '';

            // Build <option> elements via the DOM API instead of innerHTML so
            // model names returned by the server can never be parsed as HTML (XSS).
            select.innerHTML = '';
            models.forEach((model) => {
                const option = document.createElement('option');
                const size = model.size ? ` (${formatBytes(model.size)})` : '';
                option.value = model.name;
                option.textContent = `${model.name}${size}`;
                if (model.name === savedModel) {
                    option.selected = true;
                }
                select.appendChild(option);
            });

            status.textContent = `✓ Found ${models.length} model(s)`;
            status.className = 'settings-key-status success';
        }
    } catch (error) {
        console.error('Failed to detect Ollama models:', error);
        select.innerHTML = '<option value="">Connection failed</option>';
        status.textContent = 'Cannot connect to Ollama. Is it running?';
        status.className = 'settings-key-status error';
    } finally {
        // Always restore the button, whatever the outcome of detection.
        btn.disabled = false;
        btn.innerHTML = `
            <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
                <path d="M23 4v6h-6M1 20v-6h6"/>
                <path d="M3.51 9a9 9 0 0114.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0020.49 15"/>
            </svg>
            Detect
        `;
    }
}

/**
 * Format bytes to human-readable string
 * @param {number} bytes - Byte count (assumed non-negative)
 * @returns {string} - Formatted value, e.g. "1.5 KB"
 */
function formatBytes(bytes) {
    if (bytes === 0) return '0 B';
    const k = 1024;
    // Include TB/PB and clamp the index so very large values never render
    // as "… undefined" (the old 4-entry array overflowed at >= 1 TB).
    const sizes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
    const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
}

function saveSettings() {
Expand All @@ -3601,6 +3732,27 @@ function saveSettings() {
// Save all API keys and models
let allValid = true;
Object.entries(llmProviders).forEach(([provider, config]) => {
// Handle Ollama specially (no API key, has URL)
if (config.isLocal) {
const urlInput = document.getElementById('settings-ollama-url');
const modelInput = document.getElementById(`settings-model-${provider}`);
const status = document.getElementById(`settings-key-status-${provider}`);

if (urlInput) {
const url = urlInput.value.trim() || config.defaultUrl;
localStorage.setItem('ollamaUrl', url);
}
if (modelInput) {
const model = modelInput.value.trim() || config.defaultModel;
localStorage.setItem(config.modelStorageKey, model);
Comment on lines +3745 to +3747
Copy link

Copilot AI Jan 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When no models are found or connection fails, the dropdown is populated with a placeholder option with an empty value. If a user then tries to save settings without detecting models, an empty string could be saved as the model name. This may cause issues when attempting to use the API. Consider either requiring a valid model selection before allowing save, or showing a validation error if the model is empty.

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd prefer requiring valid model selection

}
if (status) {
Comment on lines +3741 to +3749
Copy link

Copilot AI Jan 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The server URL input lacks validation. Invalid URLs are saved to localStorage without verification. Consider validating that the URL is properly formatted (starts with http:// or https://) before saving it to localStorage.

Suggested change
if (urlInput) {
const url = urlInput.value.trim() || config.defaultUrl;
localStorage.setItem('ollamaUrl', url);
}
if (modelInput) {
const model = modelInput.value.trim() || config.defaultModel;
localStorage.setItem(config.modelStorageKey, model);
}
if (status) {
let localSettingsValid = true;
if (urlInput) {
const url = urlInput.value.trim() || config.defaultUrl;
const hasValidScheme = /^https?:\/\//i.test(url);
if (!hasValidScheme) {
if (status) {
status.textContent = 'Invalid URL. Must start with http:// or https://';
status.className = 'settings-key-status error';
}
if (provider === selectedProvider) {
allValid = false;
}
localSettingsValid = false;
} else {
localStorage.setItem('ollamaUrl', url);
}
}
if (modelInput) {
const model = modelInput.value.trim() || config.defaultModel;
localStorage.setItem(config.modelStorageKey, model);
}
if (status && localSettingsValid) {

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same thing, URLs should be validated to give the user feedback

status.textContent = '✓ Settings saved';
status.className = 'settings-key-status success';
}
return;
}

const input = document.getElementById(`settings-api-key-${provider}`);
const status = document.getElementById(`settings-key-status-${provider}`);
if (!input || !status) return;
Expand Down
41 changes: 41 additions & 0 deletions index.html
Original file line number Diff line number Diff line change
Expand Up @@ -1085,6 +1085,10 @@ <h4 class="settings-section-title">
<input type="radio" name="ai-provider" value="google">
<span class="provider-label">Google (Gemini)</span>
</label>
<label class="settings-provider-option">
<input type="radio" name="ai-provider" value="ollama">
<span class="provider-label">Ollama (Local)</span>
</label>
</div>
</div>

Expand Down Expand Up @@ -1181,6 +1185,43 @@ <h4 class="settings-section-title">
</a>
</div>

<div class="settings-section settings-api-section" id="settings-ollama" data-provider="ollama" style="display: none;">
    <h4 class="settings-section-title">
        <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
            <rect x="2" y="3" width="20" height="14" rx="2" ry="2"/>
            <path d="M8 21h8M12 17v4"/>
        </svg>
        Ollama Configuration
    </h4>
    <p class="settings-description" style="margin-bottom: 12px;">Run AI models locally with Ollama. No API key required.</p>
    <div class="settings-model-group" style="margin-bottom: 12px;">
        <!-- "for" attribute associates the label with the input for screen readers -->
        <label for="settings-ollama-url" class="settings-model-label">Server URL</label>
        <div style="display: flex; gap: 8px;">
            <input type="text" id="settings-ollama-url" class="settings-model-select" placeholder="http://localhost:11434" style="flex: 1; padding: 8px 12px; background: var(--bg-tertiary); border: 1px solid var(--border); border-radius: 6px; color: var(--text-primary);" />
            <button type="button" id="ollama-detect-btn" title="Detect available models" style="padding: 8px 12px; background: var(--bg-tertiary); border: 1px solid var(--border); border-radius: 6px; color: var(--text-primary); cursor: pointer; display: flex; align-items: center; gap: 4px;">
                <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
                    <path d="M23 4v6h-6M1 20v-6h6"/>
                    <path d="M3.51 9a9 9 0 0114.85-3.36L23 10M1 14l4.64 4.36A9 9 0 0020.49 15"/>
                </svg>
                Detect
            </button>
        </div>
    </div>
    <div class="settings-model-group">
        <!-- "for" attribute associates the label with the select for screen readers -->
        <label for="settings-model-ollama" class="settings-model-label">Model</label>
        <select id="settings-model-ollama" class="settings-model-select">
            <option value="">-- Click Detect to find models --</option>
        </select>
    </div>
    <div class="settings-key-status" id="settings-key-status-ollama"></div>
    <a href="https://ollama.com/library" target="_blank" class="settings-link">
        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
            <path d="M18 13v6a2 2 0 01-2 2H5a2 2 0 01-2-2V8a2 2 0 012-2h6M15 3h6v6M10 14L21 3"/>
        </svg>
        Browse available models on Ollama Library
    </a>
</div>

<div class="modal-buttons">
<button class="modal-btn modal-btn-cancel" id="settings-modal-cancel">Cancel</button>
<button class="modal-btn modal-btn-confirm" id="settings-modal-save">Save Settings</button>
Expand Down
46 changes: 45 additions & 1 deletion llm.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,23 @@ const llmProviders = {
{ id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro ($$$)' }
],
defaultModel: 'gemini-2.5-flash'
},
ollama: {
name: 'Ollama (Local)',
keyPrefix: null, // No API key required
storageKey: null, // No API key storage
modelStorageKey: 'ollamaModel',
urlStorageKey: 'ollamaUrl',
defaultUrl: 'http://localhost:11434',
models: [], // User specifies their own model
defaultModel: 'llama3.2',
isLocal: true
}
};

/**
* Get the selected model for a provider
* @param {string} provider - Provider key (anthropic, openai, google)
* @param {string} provider - Provider key (anthropic, openai, google, ollama)
* @returns {string} - Model ID
*/
function getSelectedModel(provider) {
Expand All @@ -51,6 +62,39 @@ function getSelectedModel(provider) {
return localStorage.getItem(config.modelStorageKey) || config.defaultModel;
}

/**
 * Get the Ollama base URL.
 * @returns {string} - The URL saved in localStorage, or the provider default
 */
function getOllamaUrl() {
    const savedUrl = localStorage.getItem('ollamaUrl');
    // Fall back to the default when nothing (or an empty string) is stored.
    if (savedUrl) {
        return savedUrl;
    }
    return llmProviders.ollama.defaultUrl;
}

/**
 * Fetch available models from Ollama
 * @param {string} baseUrl - Ollama server URL (optional, uses saved URL if not provided)
 * @returns {Promise<Array>} - Array of model objects with name and size;
 *                             empty array on any connection or HTTP error
 */
async function fetchOllamaModels(baseUrl = null) {
    // Strip trailing slashes so "http://host/" doesn't become "http://host//api/tags".
    const url = (baseUrl || getOllamaUrl()).replace(/\/+$/, '');
    try {
        const response = await fetch(`${url}/api/tags`, {
            method: 'GET',
            headers: { 'Content-Type': 'application/json' }
        });

        if (!response.ok) {
            throw new Error(`Failed to fetch models: ${response.status}`);
        }

        const data = await response.json();
        return data.models || [];
    } catch (error) {
        // Deliberately swallow the error and return [] so callers can show
        // a friendly "is Ollama running?" message instead of crashing.
        console.error('Error fetching Ollama models:', error);
        return [];
    }
}

/**
* Get the selected provider
* @returns {string} - Provider key
Expand Down
Loading
Loading