diff --git a/pyproject.toml b/pyproject.toml
index 0a3f566..510bc62 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,6 +31,7 @@ dependencies = [
     "anthropic==0.48.0",
     "requests==2.32.3",
     "responses==0.25.6",
+    "isort==6.0.1",
 ]
 classifiers = [
     "Development Status :: 5 - Production/Stable",
diff --git a/python_gpt_po/services/providers/anthropic_provider.py b/python_gpt_po/services/providers/anthropic_provider.py
index 67d33d3..bf85b8d 100644
--- a/python_gpt_po/services/providers/anthropic_provider.py
+++ b/python_gpt_po/services/providers/anthropic_provider.py
@@ -62,3 +62,16 @@ def get_fallback_models(self) -> List[str]:
             "claude-3-5-sonnet-latest",
             "claude-3-opus-20240229",
         ]
+
+    def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
+        """Get response from Anthropic API."""
+        if not self.is_client_initialized(provider_clients):
+            raise ValueError("Anthropic client not initialized")
+
+        message = {"role": "user", "content": content}
+        completion = provider_clients.anthropic_client.messages.create(
+            model=model,
+            max_tokens=4000,
+            messages=[message]
+        )
+        return completion.content[0].text.strip()
diff --git a/python_gpt_po/services/providers/azure_openai_provider.py b/python_gpt_po/services/providers/azure_openai_provider.py
index 5bc7fa1..9b223c9 100644
--- a/python_gpt_po/services/providers/azure_openai_provider.py
+++ b/python_gpt_po/services/providers/azure_openai_provider.py
@@ -45,3 +45,16 @@ def is_client_initialized(self, provider_clients: ProviderClients) -> bool:
     def get_fallback_models(self) -> List[str]:
         """Get fallback models for Azure OpenAI."""
         return ["gpt-35-turbo", "gpt-4"]
+
+    def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
+        """Get response from Azure OpenAI API."""
+        if not self.is_client_initialized(provider_clients):
+            raise ValueError("Azure OpenAI client not initialized")
+
+        message = {"role": "user", "content": content}
+        completion = provider_clients.azure_openai_client.chat.completions.create(
+            model=model,
+            max_tokens=4000,
+            messages=[message]
+        )
+        return completion.choices[0].message.content.strip()
diff --git a/python_gpt_po/services/providers/base.py b/python_gpt_po/services/providers/base.py
index fe1a70c..933b536 100644
--- a/python_gpt_po/services/providers/base.py
+++ b/python_gpt_po/services/providers/base.py
@@ -58,3 +58,16 @@ def get_fallback_models(self) -> List[str]:
             List of fallback model IDs
         """
         return []
+
+    @abstractmethod
+    def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
+        """Translate content using the specified model.
+
+        Args:
+            provider_clients: Provider clients instance
+            model: Model to use for translation
+            content: Content to translate
+
+        Returns:
+            Translated content
+        """
diff --git a/python_gpt_po/services/providers/deepseek_provider.py b/python_gpt_po/services/providers/deepseek_provider.py
index 5511667..eb5447d 100644
--- a/python_gpt_po/services/providers/deepseek_provider.py
+++ b/python_gpt_po/services/providers/deepseek_provider.py
@@ -54,3 +54,26 @@ def is_client_initialized(self, provider_clients: ProviderClients) -> bool:
     def get_fallback_models(self) -> List[str]:
         """Get fallback models for DeepSeek."""
         return ["deepseek-chat", "deepseek-coder"]
+
+    def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
+        """Get response from DeepSeek API."""
+        if not self.is_client_initialized(provider_clients):
+            raise ValueError("DeepSeek client not initialized")
+
+        headers = {
+            "Authorization": f"Bearer {provider_clients.deepseek_api_key}",
+            "Content-Type": "application/json"
+        }
+        payload = {
+            "model": model,
+            "messages": [{"role": "user", "content": content}],
+            "max_tokens": 4000
+        }
+        response = requests.post(
+            f"{provider_clients.deepseek_base_url}/chat/completions",
+            headers=headers,
+            json=payload,
+            timeout=30
+        )
+        response.raise_for_status()
+        return response.json()["choices"][0]["message"]["content"].strip()
diff --git a/python_gpt_po/services/providers/openai_provider.py b/python_gpt_po/services/providers/openai_provider.py
index c9cd80f..af7e27b 100644
--- a/python_gpt_po/services/providers/openai_provider.py
+++ b/python_gpt_po/services/providers/openai_provider.py
@@ -51,3 +51,15 @@ def get_fallback_models(self) -> List[str]:
             "gpt-4",
             "gpt-3.5-turbo"
         ]
+
+    def translate(self, provider_clients: ProviderClients, model: str, content: str) -> str:
+        """Get response from OpenAI API."""
+        if not self.is_client_initialized(provider_clients):
+            raise ValueError("OpenAI client not initialized")
+
+        message = {"role": "user", "content": content}
+        completion = provider_clients.openai_client.chat.completions.create(
+            model=model,
+            messages=[message]
+        )
+        return completion.choices[0].message.content.strip()
diff --git a/python_gpt_po/services/translation_service.py b/python_gpt_po/services/translation_service.py
index f7c2c96..d09f77f 100644
--- a/python_gpt_po/services/translation_service.py
+++ b/python_gpt_po/services/translation_service.py
@@ -10,13 +10,12 @@
 from typing import Any, Dict, List, Optional
 
 import polib
-import requests
 from tenacity import retry, stop_after_attempt, wait_fixed
 
 from ..models.config import TranslationConfig
-from ..models.enums import ModelProvider
 from .model_manager import ModelManager
 from .po_file_handler import POFileHandler
+from .providers.registry import ProviderRegistry
 
 
 class TranslationService:
@@ -35,67 +34,6 @@ def __init__(self, config: TranslationConfig, batch_size: int = 40):
         self.po_file_handler = POFileHandler()
         self.model_manager = ModelManager()
 
-    def _get_openai_response(self, content: str) -> str:
-        """Get response from OpenAI API."""
-        if not self.config.provider_clients.openai_client:
-            raise ValueError("OpenAI client not initialized")
-
-        message = {"role": "user", "content": content}
-        completion = self.config.provider_clients.openai_client.chat.completions.create(
-            model=self.config.model,
-            messages=[message]
-        )
-        return completion.choices[0].message.content.strip()
-
-    def _get_anthropic_response(self, content: str) -> str:
-        """Get response from Anthropic API."""
-        if not self.config.provider_clients.anthropic_client:
-            raise ValueError("Anthropic client not initialized")
-
-        message = {"role": "user", "content": content}
-        completion = self.config.provider_clients.anthropic_client.messages.create(
-            model=self.config.model,
-            max_tokens=4000,
-            messages=[message]
-        )
-        return completion.content[0].text.strip()
-
-    def _get_deepseek_response(self, content: str) -> str:
-        """Get response from DeepSeek API."""
-        if not self.config.provider_clients.deepseek_api_key:
-            raise ValueError("DeepSeek API key not set")
-
-        headers = {
-            "Authorization": f"Bearer {self.config.provider_clients.deepseek_api_key}",
-            "Content-Type": "application/json"
-        }
-        payload = {
-            "model": self.config.model,
-            "messages": [{"role": "user", "content": content}],
-            "max_tokens": 4000
-        }
-        response = requests.post(
-            f"{self.config.provider_clients.deepseek_base_url}/chat/completions",
-            headers=headers,
-            json=payload,
-            timeout=30
-        )
-        response.raise_for_status()
-        return response.json()["choices"][0]["message"]["content"].strip()
-
-    def _get_azure_openai_response(self, content: str) -> str:
-        """Get response from OpenAI API."""
-        if not self.config.provider_clients.azure_openai_client:
-            raise ValueError("OpenAI client not initialized")
-
-        message = {"role": "user", "content": content}
-        completion = self.config.provider_clients.azure_openai_client.chat.completions.create(
-            model=self.config.model,
-            max_tokens=4000,
-            messages=[message]
-        )
-        return completion.choices[0].message.content.strip()
-
     def validate_provider_connection(self) -> bool:
         """Validates the connection to the selected provider by making a test API call."""
         provider = self.config.provider
@@ -238,15 +176,13 @@ def _get_provider_response(self, content: str) -> str:
         """Get translation response from the selected provider."""
         provider = self.config.provider
 
-        if provider == ModelProvider.OPENAI:
-            return self._get_openai_response(content)
-        if provider == ModelProvider.ANTHROPIC:
-            return self._get_anthropic_response(content)
-        if provider == ModelProvider.DEEPSEEK:
-            return self._get_deepseek_response(content)
-        if provider == ModelProvider.AZURE_OPENAI:
-            return self._get_azure_openai_response(content)
-        return ""
+        if not provider:
+            return ""
+
+        provider_instance = ProviderRegistry.get_provider(provider)
+        if not provider_instance:
+            return ""
+        return provider_instance.translate(self.config.provider_clients, self.config.model, content)
 
     def _process_bulk_response(self, response_text: str, original_texts: List[str]) -> List[str]:
         """Process a bulk translation response."""
diff --git a/python_gpt_po/tests/providers/__init__.py b/python_gpt_po/tests/providers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python_gpt_po/tests/providers/test_anthropic_provider.py b/python_gpt_po/tests/providers/test_anthropic_provider.py
new file mode 100644
index 0000000..e5a8773
--- /dev/null
+++ b/python_gpt_po/tests/providers/test_anthropic_provider.py
@@ -0,0 +1,32 @@
+from unittest.mock import MagicMock
+
+import pytest
+
+from python_gpt_po.models.provider_clients import ProviderClients
+from python_gpt_po.services.providers.anthropic_provider import AnthropicProvider
+
+
+@pytest.fixture
+def mock_provider_clients() -> ProviderClients:
+    """Mock provider clients for testing."""
+    clients = ProviderClients()
+    clients.anthropic_client = MagicMock()
+    clients.anthropic_client.api_key = "sk-ant-mock-key"
+    return clients
+
+def test_translate(mock_provider_clients: ProviderClients) -> None:
+    """Test bulk translation with Anthropic."""
+    # Setup mock response
+    mock_chatcompletion = MagicMock()
+    mock_chatcompletion.content = [MagicMock()]
+    mock_chatcompletion.content[0].text = '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
+    mock_provider_clients.anthropic_client.messages.create.return_value = mock_chatcompletion
+
+    provider = AnthropicProvider()
+    translations = provider.translate(
+        provider_clients=mock_provider_clients,
+        model="claude-3-5-sonnet-latest",
+        content="['Hello', 'World', 'Welcome to our application', 'Goodbye']"
+    )
+
+    assert translations == '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
diff --git a/python_gpt_po/tests/providers/test_azure_openai_provider.py b/python_gpt_po/tests/providers/test_azure_openai_provider.py
new file mode 100644
index 0000000..848b6c2
--- /dev/null
+++ b/python_gpt_po/tests/providers/test_azure_openai_provider.py
@@ -0,0 +1,32 @@
+from unittest.mock import MagicMock
+
+import pytest
+
+from python_gpt_po.models.provider_clients import ProviderClients
+from python_gpt_po.services.providers.azure_openai_provider import AzureOpenAIProvider
+
+
+@pytest.fixture
+def mock_provider_clients() -> ProviderClients:
+    """Mock provider clients for testing."""
+    clients = ProviderClients()
+    clients.azure_openai_client = MagicMock()
+    clients.azure_openai_client.api_key = "sk-aoi-mock-key"
+    return clients
+
+def test_translate(mock_provider_clients: ProviderClients) -> None:
+    """Test bulk translation with Azure OpenAI."""
+    # Setup mock response
+    mock_chatcompletion = MagicMock()
+    mock_chatcompletion.choices = [MagicMock()]
+    mock_chatcompletion.choices[0].message.content = '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
+    mock_provider_clients.azure_openai_client.chat.completions.create.return_value = mock_chatcompletion
+
+    provider = AzureOpenAIProvider()
+    translations = provider.translate(
+        provider_clients=mock_provider_clients,
+        model="gpt-4",
+        content="['Hello', 'World', 'Welcome to our application', 'Goodbye']"
+    )
+
+    assert translations == '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
diff --git a/python_gpt_po/tests/providers/test_deepseek_provider.py b/python_gpt_po/tests/providers/test_deepseek_provider.py
new file mode 100644
index 0000000..33c9d1d
--- /dev/null
+++ b/python_gpt_po/tests/providers/test_deepseek_provider.py
@@ -0,0 +1,41 @@
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from python_gpt_po.models.provider_clients import ProviderClients
+from python_gpt_po.services.providers.deepseek_provider import DeepSeekProvider
+
+DEEPSEEK_TRANSLATION_RESPONSE = {
+    "choices": [
+        {
+            "message": {
+                "content": "```json\n[\"Bonjour\", \"Monde\", \"Bienvenue dans notre application\", \"Au revoir\"]\n```"
+            }
+        }
+    ]
+}
+
+@pytest.fixture
+def mock_provider_clients() -> ProviderClients:
+    """Mock provider clients for testing."""
+    clients = ProviderClients()
+    clients.deepseek_api_key = "sk-deepseek-mock-key"
+    clients.deepseek_base_url = "https://api.deepseek.com/v1"
+    return clients
+
+@patch('python_gpt_po.services.providers.deepseek_provider.requests.post')
+def test_translate(mock_post: MagicMock, mock_provider_clients: ProviderClients) -> None:
+    """Test translation with DeepSeek."""
+    # Setup mock response
+    mock_response = MagicMock()
+    mock_response.json.return_value = DEEPSEEK_TRANSLATION_RESPONSE
+    mock_post.return_value = mock_response
+
+    provider = DeepSeekProvider()
+    translations = provider.translate(
+        provider_clients=mock_provider_clients,
+        model="deepseek-chat",
+        content="['Hello', 'World', 'Welcome to our application', 'Goodbye']"
+    )
+
+    assert translations == '```json\n["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]\n```'
diff --git a/python_gpt_po/tests/providers/test_openai_provider.py b/python_gpt_po/tests/providers/test_openai_provider.py
new file mode 100644
index 0000000..4d20378
--- /dev/null
+++ b/python_gpt_po/tests/providers/test_openai_provider.py
@@ -0,0 +1,31 @@
+from unittest.mock import MagicMock
+
+import pytest
+
+from python_gpt_po.models.provider_clients import ProviderClients
+from python_gpt_po.services.providers.openai_provider import OpenAIProvider
+
+
+@pytest.fixture
+def mock_provider_clients() -> ProviderClients:
+    """Mock provider clients for testing."""
+    clients = ProviderClients()
+    clients.openai_client = MagicMock()
+    return clients
+
+def test_translate(mock_provider_clients: ProviderClients) -> None:
+    """Test bulk translation with OpenAI."""
+    # Setup mock response
+    mock_chatcompletion = MagicMock()
+    mock_chatcompletion.choices = [MagicMock()]
+    mock_chatcompletion.choices[0].message.content = '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
+    mock_provider_clients.openai_client.chat.completions.create.return_value = mock_chatcompletion
+
+    provider = OpenAIProvider()
+    translations = provider.translate(
+        provider_clients=mock_provider_clients,
+        model="gpt-4",
+        content="['Hello', 'World', 'Welcome to our application', 'Goodbye']"
+    )
+
+    assert translations == '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
diff --git a/python_gpt_po/tests/test_multi_provider.py b/python_gpt_po/tests/test_multi_provider.py
index b09536e..89cc8f5 100644
--- a/python_gpt_po/tests/test_multi_provider.py
+++ b/python_gpt_po/tests/test_multi_provider.py
@@ -8,6 +8,7 @@
 
 import pytest
 import responses
+from providers.test_deepseek_provider import DEEPSEEK_TRANSLATION_RESPONSE
 
 from python_gpt_po.models.config import TranslationConfig, TranslationFlags
 # Import the necessary classes from the new modular structure
@@ -99,38 +100,6 @@
     ]
 }
 
-# Translation responses for different providers
-OPENAI_TRANSLATION_RESPONSE = {
-    "choices": [
-        {
-            "message": {
-                "content": '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
-            }
-        }
-    ]
-}
-
-AZURE_OPENAI_TRANSLATION_RESPONSE = OPENAI_TRANSLATION_RESPONSE
-
-ANTHROPIC_TRANSLATION_RESPONSE = {
-    "content": [
-        {
-            "text": '["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]'
-        }
-    ]
-}
-
-DEEPSEEK_TRANSLATION_RESPONSE = {
-    "choices": [
-        {
-            "message": {
-                "content": "```json\n[\"Bonjour\", \"Monde\", \"Bienvenue dans notre application\", \"Au revoir\"]\n```"
-            }
-        }
-    ]
-}
-
-
 @pytest.fixture
 def temp_po_file(tmp_path: str) -> str:
     """Create a temporary PO file for testing."""
@@ -227,7 +196,7 @@ def translation_service_deepseek(translation_config_deepseek: TranslationConfig)
 
 
 @patch('requests.get')
-def test_get_openai_models(mock_get, mock_provider_clients: ProviderClients):
+def test_get_openai_models(mock_get: MagicMock, mock_provider_clients: ProviderClients):
     """Test getting OpenAI models."""
     # Setup mock response
     mock_response = MagicMock()
@@ -249,7 +218,7 @@ def test_get_openai_models(mock_get, mock_provider_clients: ProviderClients):
 
 
 @patch('requests.get')
-def test_get_ayure_openai_models(mock_get, mock_provider_clients: ProviderClients):
+def test_get_ayure_openai_models(mock_get: MagicMock, mock_provider_clients: ProviderClients):
     """Test getting OpenAI models."""
     # Setup mock response
     mock_response = MagicMock()
@@ -312,14 +281,8 @@ def test_get_deepseek_models(mock_provider_clients: ProviderClients):
     assert "deepseek-coder" in models
 
 
-@patch('python_gpt_po.services.translation_service.requests.post')
-def test_translate_bulk_openai(mock_post, translation_service_openai: TranslationService):
+def test_translate_bulk_openai(translation_service_openai: TranslationService):
     """Test bulk translation with OpenAI."""
-    # Setup mock response
-    mock_response = MagicMock()
-    mock_response.json.return_value = OPENAI_TRANSLATION_RESPONSE
-    mock_post.return_value = mock_response
-
     # Call function
     translation_service_openai.config.provider_clients.openai_client.chat.completions.create.return_value = (
         MagicMock(
@@ -340,14 +303,8 @@ def test_translate_bulk_openai(mock_post, translation_service_openai: Translatio
     assert translations == ["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]
 
 
-@patch('python_gpt_po.services.translation_service.requests.post')
-def test_translate_bulk_azure_openai(mock_post, translation_service_azure_openai: TranslationService):
+def test_translate_bulk_azure_openai(translation_service_azure_openai: TranslationService):
     """Test bulk translation with OpenAI."""
-    # Setup mock response
-    mock_response = MagicMock()
-    mock_response.json.return_value = AZURE_OPENAI_TRANSLATION_RESPONSE
-    mock_post.return_value = mock_response
-
     # Call function
     azure_client = translation_service_azure_openai.config.provider_clients.azure_openai_client
     azure_response = azure_client.chat.completions.create
@@ -368,8 +325,7 @@ def test_translate_bulk_azure_openai(mock_post, translation_service_azure_openai
     assert translations == ["Bonjour", "Monde", "Bienvenue dans notre application", "Au revoir"]
 
 
-@patch('python_gpt_po.services.translation_service.requests.post')
-def test_translate_bulk_anthropic(mock_post, translation_service_anthropic: TranslationService):
+def test_translate_bulk_anthropic(translation_service_anthropic: TranslationService):
     """Test bulk translation with Anthropic."""
     # Setup mock client response
     translation_service_anthropic.config.provider_clients.anthropic_client.messages.create.return_value = MagicMock(
@@ -428,7 +384,7 @@ def test_clean_json_response(translation_service_deepseek: TranslationService):
 
 
 @patch('polib.pofile')
-def test_process_po_file_all_providers(mock_pofile,
+def test_process_po_file_all_providers(mock_pofile: MagicMock,
                                        translation_service_openai: TranslationService,
                                        translation_service_anthropic: TranslationService,
                                        translation_service_deepseek: TranslationService,
@@ -475,7 +431,7 @@ def test_process_po_file_all_providers(mock_pofile,
 
 
 @patch('python_gpt_po.services.po_file_handler.POFileHandler.disable_fuzzy_translations')
-def test_fuzzy_flag_handling(mock_disable_fuzzy, translation_service_openai: TranslationService, temp_po_file):
+def test_fuzzy_flag_handling(mock_disable_fuzzy: MagicMock, translation_service_openai: TranslationService, temp_po_file: str):
     """Test handling of fuzzy translations."""
     # Enable fuzzy flag
     translation_service_openai.config.flags.fuzzy = True
@@ -533,7 +489,7 @@ def test_validation_model_connection_all_providers(
 
 @patch('os.walk')
 @patch('polib.pofile')
-def test_scan_and_process_po_files(mock_pofile, mock_walk, translation_service_openai: TranslationService):
+def test_scan_and_process_po_files(mock_pofile: MagicMock, mock_walk: MagicMock, translation_service_openai: TranslationService):
     """Test scanning and processing PO files."""
     # Setup mock directory structure
     mock_walk.return_value = [
diff --git a/python_gpt_po/tests/test_po_translator.py b/python_gpt_po/tests/test_po_translator.py
index f628791..c3ef36b 100644
--- a/python_gpt_po/tests/test_po_translator.py
+++ b/python_gpt_po/tests/test_po_translator.py
@@ -28,7 +28,7 @@ def fixture_mock_openai_client():
 
 
 @pytest.fixture(name='translation_config')
-def fixture_translation_config(mock_openai_client):
+def fixture_translation_config(mock_openai_client: MagicMock):
     """
     Fixture to create a TranslationConfig instance.
     """
@@ -50,7 +50,7 @@
 
 
 @pytest.fixture(name='translation_service')
-def fixture_translation_service(translation_config):
+def fixture_translation_service(translation_config: MagicMock):
     """
     Fixture to create a TranslationService instance.
     """
@@ -65,14 +65,14 @@
     return MagicMock(spec=POFileHandler)
 
 
-def test_validate_openai_connection(translation_service):
+def test_validate_openai_connection(translation_service: MagicMock):
     """Test to validate the connection."""
     # The new method is validate_provider_connection instead of validate_openai_connection
     assert translation_service.validate_provider_connection() is True
 
 
 @patch('python_gpt_po.services.po_file_handler.POFileHandler')
-def test_process_po_file(mock_po_file_handler_class, translation_service, tmp_path):
+def test_process_po_file(mock_po_file_handler_class: MagicMock, translation_service: MagicMock, tmp_path: MagicMock):
     """
     Test the process_po_file method.
     """
@@ -131,7 +131,7 @@ def mock_prepare(po_file_path, languages):
     translation_service._prepare_po_file = original_prepare
 
 
-def test_translate_bulk(translation_service, tmp_path):
+def test_translate_bulk(translation_service: MagicMock, tmp_path: MagicMock):
     """Test the bulk translation functionality."""
     texts_to_translate = ["HR", "TENANT", "HEALTHCARE", "TRANSPORT", "SERVICES"]
     po_file_path = str(tmp_path / "django.po")
@@ -145,7 +145,7 @@ def test_translate_bulk(translation_service, tmp_path):
     assert translated_texts == ["HR", "Inquilino", "Salud", "Transporte", "Servicios"]
 
 
-def test_translate_single(translation_service):
+def test_translate_single(translation_service: MagicMock):
     """Test the single translation functionality."""
 
     text_to_translate = "HEALTHCARE"
diff --git a/requirements.txt b/requirements.txt
index 6701fa0..958179e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,4 +7,5 @@ setuptools-scm==8.1.0
 pycountry==24.6.1
 anthropic==0.48.0
 requests==2.32.3
-responses==0.25.6
\ No newline at end of file
+responses==0.25.6
+isort==6.0.1
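Suggested follow-up (sketch only, not part of this diff): the new registry-based dispatch in _get_provider_response is only exercised indirectly by the service-level tests above. The sketch below assumes that ProviderRegistry.get_provider accepts a ModelProvider member and returns a registered provider instance implementing the translate(provider_clients, model, content) interface added in base.py; the registry module itself is not shown in this diff, so that lookup behaviour is an assumption inferred from the new translation_service.py code.

# Sketch of a registry-level test (assumes the registry is populated at import time
# and that ProviderRegistry.get_provider(ModelProvider.OPENAI) returns an
# OpenAIProvider-style instance, as implied by the new _get_provider_response).
from unittest.mock import MagicMock

from python_gpt_po.models.enums import ModelProvider
from python_gpt_po.models.provider_clients import ProviderClients
from python_gpt_po.services.providers.registry import ProviderRegistry


def test_registry_dispatch() -> None:
    """Registry lookup plus translate() against a mocked OpenAI client."""
    clients = ProviderClients()
    clients.openai_client = MagicMock()
    completion = MagicMock()
    completion.choices = [MagicMock()]
    completion.choices[0].message.content = '["Bonjour"]'
    clients.openai_client.chat.completions.create.return_value = completion

    provider = ProviderRegistry.get_provider(ModelProvider.OPENAI)  # assumed lookup API
    assert provider is not None
    assert provider.translate(clients, "gpt-4", "['Hello']") == '["Bonjour"]'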