Skip to content

Commit 742d1bc

Browse files
michaelbeijer and claude
committed
Add OpenRouter as dedicated AI provider, bump to v1.9.371
OpenRouter gateway gives access to 200+ models with a single API key. Curated dropdown of 8 models + editable field for any model ID. Uses OpenAI-compatible API with HTTP-Referer/X-Title headers. Co-Authored-By: Claude Opus 4.6 <[email protected]>
1 parent fcb8001 commit 742d1bc

4 files changed

Lines changed: 144 additions & 7 deletions

File tree

CHANGELOG.md

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,16 @@
22

33
All notable changes to Supervertaler Workbench are documented in this file.
44

5-
**Current Version:** v1.9.370 (March 31, 2026)
5+
**Current Version:** v1.9.371 (April 2, 2026)
66

77

8+
## v1.9.371 - April 2, 2026
9+
10+
### Added
11+
- **OpenRouter provider** – access 200+ models from OpenAI, Anthropic, Google, Mistral, and others with a single API key via openrouter.ai. Includes a curated dropdown of 8 recommended models (Claude Sonnet/Opus, GPT-5.4/Mini, Gemini 3.1 Pro/Flash, Mistral Small, Qwen 3.6 Plus Free) plus an editable model field for typing any OpenRouter model ID
12+
13+
---
14+
815
## v1.9.370 - March 31, 2026
916

1017
### Changed

Supervertaler.py

Lines changed: 68 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9119,6 +9119,9 @@ def _update_llm_indicator(self):
91199119
elif provider == 'mistral':
91209120
icon = "🌀"
91219121
display = f"{icon} {model}"
9122+
elif provider == 'openrouter':
9123+
icon = "🌐"
9124+
display = f"{icon} {model}"
91229125
elif provider == 'custom_openai':
91239126
icon = "🔌"
91249127
profile = self._get_active_custom_profile(settings)
@@ -17876,6 +17879,11 @@ def _create_ai_settings_tab(self):
1787617879
provider_button_group.addButton(mistral_radio)
1787717880
provider_layout.addWidget(mistral_radio)
1787817881

17882+
openrouter_radio = CustomRadioButton("🌐 OpenRouter (200+ models)")
17883+
openrouter_radio.setChecked(settings.get('provider', 'openai') == 'openrouter')
17884+
provider_button_group.addButton(openrouter_radio)
17885+
provider_layout.addWidget(openrouter_radio)
17886+
1787917887
# Local LLM option (Ollama)
1788017888
ollama_radio = CustomRadioButton("🖥️ Local LLM (Ollama - runs on your computer)")
1788117889
ollama_radio.setChecked(settings.get('provider', 'openai') == 'ollama')
@@ -18010,6 +18018,42 @@ def _create_ai_settings_tab(self):
1801018018

1801118019
model_layout.addSpacing(10)
1801218020

18021+
# OpenRouter models
18022+
openrouter_model_label = QLabel("<b>🌐 OpenRouter Models:</b>")
18023+
model_layout.addWidget(openrouter_model_label)
18024+
18025+
openrouter_combo = QComboBox()
18026+
openrouter_combo.setEditable(True)
18027+
openrouter_combo.addItems([
18028+
"anthropic/claude-sonnet-4.6 (Recommended)",
18029+
"anthropic/claude-opus-4.6 (Premium Reasoning)",
18030+
"openai/gpt-5.4 (Advanced Reasoning)",
18031+
"openai/gpt-5.4-mini (Fast & Economical)",
18032+
"google/gemini-3.1-pro-preview (Latest Gemini)",
18033+
"google/gemini-3-flash-preview (Fast Gemini)",
18034+
"mistralai/mistral-small-2603 (European Languages)",
18035+
"qwen/qwen3.6-plus:free (Free)"
18036+
])
18037+
openrouter_combo.setToolTip(
18038+
"OpenRouter gives you access to 200+ models with a single API key.\n"
18039+
"Select a model from the dropdown or type any model ID from openrouter.ai/models.\n\n"
18040+
"Pricing varies per model — see openrouter.ai for details."
18041+
)
18042+
current_openrouter_model = settings.get('openrouter_model', 'anthropic/claude-sonnet-4.6')
18043+
# Try to select from dropdown, or set as typed text
18044+
found = False
18045+
for i in range(openrouter_combo.count()):
18046+
if current_openrouter_model in openrouter_combo.itemText(i):
18047+
openrouter_combo.setCurrentIndex(i)
18048+
found = True
18049+
break
18050+
if not found:
18051+
openrouter_combo.setEditText(current_openrouter_model)
18052+
openrouter_combo.setEnabled(openrouter_radio.isChecked())
18053+
model_layout.addWidget(openrouter_combo)
18054+
18055+
model_layout.addSpacing(10)
18056+
1801318057
# Local LLM (Ollama) models
1801418058
ollama_model_label = QLabel("<b>🖥️ Local LLM Models (Ollama):</b>")
1801518059
model_layout.addWidget(ollama_model_label)
@@ -18235,19 +18279,23 @@ def _update_provider_label(radio, base_name, combo):
1823518279
lambda: _update_provider_label(gemini_radio, "Google Gemini", gemini_combo))
1823618280
mistral_combo.currentIndexChanged.connect(
1823718281
lambda: _update_provider_label(mistral_radio, "Mistral AI", mistral_combo))
18282+
openrouter_combo.currentIndexChanged.connect(
18283+
lambda: _update_provider_label(openrouter_radio, "🌐 OpenRouter (200+ models)", openrouter_combo))
1823818284

1823918285
# Set initial labels based on current combo selections
1824018286
_update_provider_label(openai_radio, "OpenAI", openai_combo)
1824118287
_update_provider_label(claude_radio, "Anthropic Claude", claude_combo)
1824218288
_update_provider_label(gemini_radio, "Google Gemini", gemini_combo)
1824318289
_update_provider_label(mistral_radio, "Mistral AI", mistral_combo)
18290+
_update_provider_label(openrouter_radio, "🌐 OpenRouter (200+ models)", openrouter_combo)
1824418291

1824518292
# Connect radio buttons to enable/disable combos
1824618293
def update_combo_states():
1824718294
openai_combo.setEnabled(openai_radio.isChecked())
1824818295
claude_combo.setEnabled(claude_radio.isChecked())
1824918296
gemini_combo.setEnabled(gemini_radio.isChecked())
1825018297
mistral_combo.setEnabled(mistral_radio.isChecked())
18298+
openrouter_combo.setEnabled(openrouter_radio.isChecked())
1825118299
if ollama_status_widget:
1825218300
ollama_status_widget.setEnabled(ollama_radio.isChecked())
1825318301
_custom_enabled = custom_radio.isChecked()
@@ -18292,6 +18340,7 @@ def on_ollama_selected(checked):
1829218340
claude_radio.toggled.connect(update_combo_states)
1829318341
gemini_radio.toggled.connect(update_combo_states)
1829418342
mistral_radio.toggled.connect(update_combo_states)
18343+
openrouter_radio.toggled.connect(update_combo_states)
1829518344
ollama_radio.toggled.connect(update_combo_states)
1829618345
ollama_radio.toggled.connect(on_ollama_selected)
1829718346
custom_radio.toggled.connect(update_combo_states)
@@ -18345,6 +18394,7 @@ def on_ollama_selected(checked):
1834518394
("Claude (Anthropic):", "claude", "sk-ant-api03-..."),
1834618395
("Gemini (Google AI):", "gemini", "AIza..."),
1834718396
("Mistral AI:", "mistral", "..."),
18397+
("OpenRouter:", "openrouter", "sk-or-..."),
1834818398
("Ollama Endpoint:", "ollama_endpoint", "http://localhost:11434"),
1834918399
]
1835018400

@@ -18403,6 +18453,10 @@ def on_ollama_selected(checked):
1840318453
mistral_enable_cb.setChecked(enabled_providers.get('llm_mistral', True))
1840418454
provider_enable_layout.addWidget(mistral_enable_cb)
1840518455

18456+
openrouter_enable_cb = CheckmarkCheckBox("Enable OpenRouter")
18457+
openrouter_enable_cb.setChecked(enabled_providers.get('llm_openrouter', True))
18458+
provider_enable_layout.addWidget(openrouter_enable_cb)
18459+
1840618460
ollama_enable_cb = CheckmarkCheckBox("Enable Local LLM (Ollama)")
1840718461
ollama_enable_cb.setChecked(enabled_providers.get('llm_ollama', True))
1840818462
provider_enable_layout.addWidget(ollama_enable_cb)
@@ -18762,7 +18816,9 @@ def update_ql_context_label(value):
1876218816
custom_model_input=custom_model_input, custom_enable_cb=custom_enable_cb,
1876318817
custom_profile_combo=custom_profile_combo, custom_key_input=custom_key_input,
1876418818
mistral_radio=mistral_radio, mistral_combo=mistral_combo,
18765-
mistral_enable_cb=mistral_enable_cb
18819+
mistral_enable_cb=mistral_enable_cb,
18820+
openrouter_radio=openrouter_radio, openrouter_combo=openrouter_combo,
18821+
openrouter_enable_cb=openrouter_enable_cb
1876618822
))
1876718823
layout.addWidget(save_btn)
1876818824

@@ -22559,7 +22615,9 @@ def _save_ai_settings_from_ui(self, openai_radio, claude_radio, gemini_radio, ol
2255922615
custom_model_input=None, custom_enable_cb=None,
2256022616
custom_profile_combo=None, custom_key_input=None,
2256122617
mistral_radio=None, mistral_combo=None,
22562-
mistral_enable_cb=None):
22618+
mistral_enable_cb=None,
22619+
openrouter_radio=None, openrouter_combo=None,
22620+
openrouter_enable_cb=None):
2256322621
"""Save all AI settings from the unified AI Settings tab"""
2256422622
# Determine selected provider
2256522623
if openai_radio.isChecked():
@@ -22570,6 +22628,8 @@ def _save_ai_settings_from_ui(self, openai_radio, claude_radio, gemini_radio, ol
2257022628
provider = 'gemini'
2257122629
elif mistral_radio and mistral_radio.isChecked():
2257222630
provider = 'mistral'
22631+
elif openrouter_radio and openrouter_radio.isChecked():
22632+
provider = 'openrouter'
2257322633
elif ollama_radio.isChecked():
2257422634
provider = 'ollama'
2257522635
elif custom_radio and custom_radio.isChecked():
@@ -22608,6 +22668,7 @@ def _save_ai_settings_from_ui(self, openai_radio, claude_radio, gemini_radio, ol
2260822668
'claude_model': claude_combo.currentText().split()[0],
2260922669
'gemini_model': gemini_combo.currentText().split()[0],
2261022670
'mistral_model': mistral_combo.currentText().split()[0] if mistral_combo else 'mistral-large-latest',
22671+
'openrouter_model': openrouter_combo.currentText().split()[0] if openrouter_combo else 'anthropic/claude-sonnet-4.6',
2261122672
'ollama_model': ollama_model,
2261222673
'custom_openai_model': active_model,
2261322674
'custom_openai_endpoint': active_endpoint,
@@ -22627,6 +22688,7 @@ def _save_ai_settings_from_ui(self, openai_radio, claude_radio, gemini_radio, ol
2262722688
'llm_claude': claude_enable_cb.isChecked(),
2262822689
'llm_gemini': gemini_enable_cb.isChecked(),
2262922690
'llm_mistral': mistral_enable_cb.isChecked() if mistral_enable_cb else True,
22691+
'llm_openrouter': openrouter_enable_cb.isChecked() if openrouter_enable_cb else True,
2263022692
'llm_ollama': ollama_enable_cb.isChecked(),
2263122693
'llm_custom_openai': custom_enable_cb.isChecked() if custom_enable_cb else True
2263222694
}
@@ -47045,7 +47107,9 @@ def create_llm_client(self, provider, model, api_keys, settings=None):
4704547107
from modules.llm_clients import LLMClient
4704647108
api_key = api_keys.get(provider) or (api_keys.get('google') if provider == 'gemini' else None)
4704747109
base_url = None
47048-
if provider == 'custom_openai':
47110+
if provider == 'openrouter':
47111+
base_url = 'https://openrouter.ai/api/v1'
47112+
elif provider == 'custom_openai':
4704947113
if settings is None:
4705047114
settings = self.load_llm_settings()
4705147115
profile = self._get_active_custom_profile(settings)
@@ -47069,6 +47133,7 @@ def load_provider_enabled_states(self) -> Dict[str, bool]:
4706947133
'llm_claude': True,
4707047134
'llm_gemini': True,
4707147135
'llm_mistral': True,
47136+
'llm_openrouter': True,
4707247137
'llm_ollama': True,
4707347138
'llm_custom_openai': True,
4707447139
'mt_google_translate': True,

modules/llm_clients.py

Lines changed: 67 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ def _sanitize_ollama_endpoint(endpoint: str) -> str:
139139
@dataclass
140140
class LLMConfig:
141141
"""Configuration for LLM client"""
142-
provider: Literal["openai", "claude", "gemini", "mistral"]
142+
provider: Literal["openai", "claude", "gemini", "mistral", "openrouter"]
143143
model: str
144144
api_key: str
145145
temperature: Optional[float] = None # Auto-detected if None
@@ -156,7 +156,8 @@ class LLMClient:
156156
"gemini": "gemini-2.5-flash", # Gemini 2.5 Flash (2025)
157157
"mistral": "mistral-large-latest", # Mistral Large (flagship)
158158
"ollama": "translategemma:12b", # Local LLM via Ollama - purpose-built translation model
159-
"custom_openai": "custom-model" # Custom OpenAI-compatible endpoint
159+
"custom_openai": "custom-model", # Custom OpenAI-compatible endpoint
160+
"openrouter": "anthropic/claude-sonnet-4.6" # OpenRouter gateway (200+ models)
160161
}
161162

162163
# Available Mistral models with descriptions
@@ -181,6 +182,59 @@ class LLMClient:
181182
}
182183
}
183184

185+
# Available OpenRouter models (curated selection)
186+
# OpenRouter is an API gateway — users can also type any model ID from openrouter.ai/models
187+
OPENROUTER_MODELS = {
188+
"anthropic/claude-sonnet-4.6": {
189+
"name": "Claude Sonnet 4.6",
190+
"description": "Anthropic flagship — fast, high quality",
191+
"strengths": ["General translation", "Multilingual", "Fast"],
192+
"use_case": "Recommended for most translation tasks"
193+
},
194+
"anthropic/claude-opus-4.6": {
195+
"name": "Claude Opus 4.6",
196+
"description": "Anthropic premium — best reasoning",
197+
"strengths": ["Legal translation", "Technical documents", "Complex reasoning"],
198+
"use_case": "Specialized legal/technical translation"
199+
},
200+
"openai/gpt-5.4": {
201+
"name": "GPT 5.4",
202+
"description": "OpenAI flagship — advanced reasoning",
203+
"strengths": ["Complex reasoning", "Multilingual", "High accuracy"],
204+
"use_case": "Complex translation tasks"
205+
},
206+
"openai/gpt-5.4-mini": {
207+
"name": "GPT 5.4 Mini",
208+
"description": "OpenAI fast & economical",
209+
"strengths": ["Fast", "Cost-effective", "Multilingual"],
210+
"use_case": "High-volume translation"
211+
},
212+
"google/gemini-3.1-pro-preview": {
213+
"name": "Gemini 3.1 Pro",
214+
"description": "Google latest — strong multilingual",
215+
"strengths": ["Multilingual", "Large context", "High quality"],
216+
"use_case": "General translation"
217+
},
218+
"google/gemini-3-flash-preview": {
219+
"name": "Gemini 3 Flash",
220+
"description": "Google fast — great for high volume",
221+
"strengths": ["Fast", "Cost-effective", "Multilingual"],
222+
"use_case": "High-volume translation"
223+
},
224+
"mistralai/mistral-small-2603": {
225+
"name": "Mistral Small",
226+
"description": "Mistral cost-effective — strong European languages",
227+
"strengths": ["European languages", "Fast", "Cost-effective"],
228+
"use_case": "European language translation"
229+
},
230+
"qwen/qwen3.6-plus:free": {
231+
"name": "Qwen 3.6 Plus (Free)",
232+
"description": "Free tier — no cost, good quality",
233+
"strengths": ["Free", "Multilingual", "100+ languages"],
234+
"use_case": "Testing or budget-constrained projects"
235+
}
236+
}
237+
184238
# Available Ollama models with descriptions (for UI display)
185239
# Last audited: February 2026
186240
OLLAMA_MODELS = {
@@ -479,6 +533,10 @@ def __init__(self, api_key: str = None, provider: str = "openai", model: Optiona
479533
if self.provider == "mistral" and not self.base_url:
480534
self.base_url = "https://api.mistral.ai/v1"
481535

536+
# For OpenRouter, set the base URL and extra headers
537+
if self.provider == "openrouter" and not self.base_url:
538+
self.base_url = "https://openrouter.ai/api/v1"
539+
482540
# Auto-detect temperature based on model
483541
self.temperature = self._get_temperature()
484542

@@ -727,6 +785,8 @@ def translate(
727785
result = self._call_gemini(prompt, max_tokens=max_tokens, images=images, system_prompt=system_prompt)
728786
elif self.provider == "mistral":
729787
result = self._call_openai(prompt, max_tokens=max_tokens, images=None, system_prompt=system_prompt)
788+
elif self.provider == "openrouter":
789+
result = self._call_openai(prompt, max_tokens=max_tokens, images=None, system_prompt=system_prompt)
730790
elif self.provider == "ollama":
731791
result = self._call_ollama(prompt, max_tokens=max_tokens, system_prompt=system_prompt)
732792
else:
@@ -760,6 +820,11 @@ def _call_openai(self, prompt: str, max_tokens: Optional[int] = None, images: Op
760820
client_kwargs = {"api_key": self.api_key, "timeout": timeout_seconds}
761821
if self.base_url:
762822
client_kwargs["base_url"] = self.base_url
823+
if self.provider == "openrouter":
824+
client_kwargs["default_headers"] = {
825+
"HTTP-Referer": "https://supervertaler.com",
826+
"X-Title": "Supervertaler"
827+
}
763828
if self.http_proxy:
764829
import httpx
765830
client_kwargs["http_client"] = httpx.Client(proxy=self.http_proxy, timeout=timeout_seconds)

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "supervertaler"
7-
version = "1.9.370"
7+
version = "1.9.371"
88
description = "Professional AI-enhanced translation workbench with multi-LLM support, glossary system, TM, spellcheck, voice commands, and PyQt6 interface. Batteries included (core)."
99
readme = "README.md"
1010
requires-python = ">=3.10"

0 commit comments

Comments
 (0)